max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
449
<gh_stars>100-1000 from unfurl.core import Unfurl import unittest class TestBing(unittest.TestCase): def test_bing(self): """ Test a typical and a unique Bing url """ # test a Bing search url test = Unfurl() test.add_to_queue( data_type='url', key=None, value='https://www.bing.com/search?q=digital+forensics&qs=n&form=QBLH&sp=-1' '&pq=digital+forensic&sc=8-16&sk=&cvid=77BF13B59CF84B98B13C067AAA3DB701') test.parse_queue() # test number of nodes self.assertEqual(len(test.nodes.keys()), 23) self.assertEqual(test.total_nodes, 23) # Test query parsing self.assertEqual('q: digital forensics', test.nodes[9].label) # is processing finished empty self.assertTrue(test.queue.empty()) self.assertEqual(len(test.edges), 0) if __name__ == '__main__': unittest.main()
440
13,937
<gh_stars>1000+ [{"id":"2d14f1a7.e776ce","type":"comment","z":"39759fee.3ffb6","name":"Slow down messages passing through a flow","info":"Delay node can be used to slow down messages passing through a flow.\n\nSee Node-RED cookbook [item](https://cookbook.nodered.org/basic/rate-limit-messages).","x":230,"y":80,"wires":[]},{"id":"af78b43e.9817d8","type":"inject","z":"39759fee.3ffb6","name":"Inject Array","repeat":"","crontab":"","once":false,"onceDelay":0.1,"topic":"","payload":"[0,1,2,3,4,5,6,7,8,9]","payloadType":"json","x":190,"y":140,"wires":[["a35943e3.eaf0a"]]},{"id":"a35943e3.eaf0a","type":"split","z":"39759fee.3ffb6","name":"","splt":"\\n","spltType":"str","arraySplt":1,"arraySpltType":"len","stream":false,"addname":"","x":330,"y":140,"wires":[["23eacc60.7290a4"]]},{"id":"23eacc60.7290a4","type":"delay","z":"39759fee.3ffb6","name":"","pauseType":"rate","timeout":"5","timeoutUnits":"seconds","rate":"1","nbRateUnits":"1","rateUnits":"second","randomFirst":"1","randomLast":"5","randomUnits":"seconds","drop":false,"x":470,"y":140,"wires":[["b5b7746a.53bf88"]]},{"id":"b5b7746a.53bf88","type":"debug","z":"39759fee.3ffb6","name":"Debug","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","x":610,"y":140,"wires":[]}]
492
14,668
<filename>content/browser/devtools/devtools_issue_storage.cc // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/devtools/devtools_issue_storage.h" #include "content/browser/devtools/protocol/audits.h" #include "content/browser/devtools/render_frame_devtools_agent_host.h" #include "content/browser/renderer_host/render_frame_host_impl.h" #include "content/public/browser/navigation_handle.h" #include "content/public/browser/web_contents.h" #include "ui/base/page_transition_types.h" namespace content { static const unsigned kMaxIssueCount = 1000; PAGE_USER_DATA_KEY_IMPL(DevToolsIssueStorage); DevToolsIssueStorage::DevToolsIssueStorage(Page& page) : PageUserData<DevToolsIssueStorage>(page) {} DevToolsIssueStorage::~DevToolsIssueStorage() = default; void DevToolsIssueStorage::AddInspectorIssue( RenderFrameHost* rfh, std::unique_ptr<protocol::Audits::InspectorIssue> issue) { DCHECK_LE(issues_.size(), kMaxIssueCount); if (issues_.size() == kMaxIssueCount) { issues_.pop_front(); } issues_.emplace_back(rfh->GetGlobalId(), std::move(issue)); } std::vector<const protocol::Audits::InspectorIssue*> DevToolsIssueStorage::FindIssuesForAgentOf( RenderFrameHost* render_frame_host) const { RenderFrameHostImpl* render_frame_host_impl = static_cast<RenderFrameHostImpl*>(render_frame_host); RenderFrameHostImpl* main_rfh = static_cast<RenderFrameHostImpl*>(&page().GetMainDocument()); DevToolsAgentHostImpl* agent_host = RenderFrameDevToolsAgentHost::GetFor(render_frame_host_impl); DCHECK_EQ(&render_frame_host->GetPage(), &page()); DCHECK(RenderFrameDevToolsAgentHost::ShouldCreateDevToolsForHost( render_frame_host_impl)); DCHECK(agent_host); bool is_main_agent = render_frame_host_impl == main_rfh; std::vector<const protocol::Audits::InspectorIssue*> issues; for (const auto& entry : issues_) { bool should_add; RenderFrameHostImpl* issue_rfh = 
RenderFrameHostImpl::FromID(entry.first); if (!issue_rfh) { // Issues that fall in this category are either associated with |main_rfh| // or with deleted subframe RFHs of |main_rfh|. In both cases, we only // want to retrieve them for |main_rfh|'s agent. // Note: This means that issues for deleted subframe RFHs get reparented // to |main_rfh| after deletion. should_add = is_main_agent; } else { should_add = RenderFrameDevToolsAgentHost::GetFor(issue_rfh) == agent_host; } if (should_add) issues.push_back(entry.second.get()); } return issues; } } // namespace content
949
2,151
<reponame>zipated/src // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/wm/wm_shadow_controller_delegate.h" #include "ash/shell.h" #include "ash/wm/overview/window_selector.h" #include "ash/wm/overview/window_selector_controller.h" #include "ash/wm/splitview/split_view_controller.h" #include "ui/aura/window.h" namespace ash { WmShadowControllerDelegate::WmShadowControllerDelegate() = default; WmShadowControllerDelegate::~WmShadowControllerDelegate() = default; bool WmShadowControllerDelegate::ShouldShowShadowForWindow( const aura::Window* window) { SplitViewController* split_view_controller = Shell::Get()->split_view_controller(); if (!split_view_controller) return true; // Hide the shadow if it is one of the splitscreen snapped windows. if (window == split_view_controller->left_window() || window == split_view_controller->right_window()) { return false; } // Hide the shadow while we are in overview mode. WindowSelectorController* window_selector_controller = Shell::Get()->window_selector_controller(); if (!window_selector_controller || !window_selector_controller->IsSelecting()) return true; WindowSelector* window_selector = window_selector_controller->window_selector(); DCHECK(window_selector); return window_selector->IsShuttingDown() || !window_selector->IsWindowInOverview(window); } } // namespace ash
491
852
import FWCore.ParameterSet.Config as cms # The line below always has to be included to make VarParsing work from FWCore.ParameterSet.VarParsing import VarParsing options = VarParsing ('analysis') options.parseArguments() process = cms.Process("Demo") process.load("FWCore.MessageService.MessageLogger_cfi") process.load('Configuration.Geometry.GeometryExtended2026D49Reco_cff') process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff') from Configuration.AlCa.GlobalTag import GlobalTag process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:phase2_realistic', '') process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) ) input_filename = 'default.root' if len(options.inputFiles) == 0 else options.inputFiles[0] #input_filename='step2SingleElectronPt15Eta1p7_2p7_SimTracksters.root' #input_filename='step2FineCaloSingleElectronPt15Eta1p7_2p7_SimTracksters.root' #input_filename='step2SingleElectronPt15Eta1p7_2p7_CBWEAndSimTracksters.root' #input_filename='step2FineCaloSingleElectronPt15Eta1p7_2p7_CBWEAndSimTracksters.root' process.source = cms.Source("PoolSource", inputCommands = cms.untracked.vstring(['keep *', 'drop l1tEMTFHit2016Extras_simEmtfDigis_CSC_HLT', 'drop l1tEMTFHit2016Extras_simEmtfDigis_RPC_HLT', 'drop l1tEMTFHit2016s_simEmtfDigis__HLT', 'drop l1tEMTFTrack2016Extras_simEmtfDigis__HLT', 'drop l1tEMTFTrack2016s_simEmtfDigis__HLT']), # replace 'myfile.root' with the source file you want to use fileNames = cms.untracked.vstring( # 'file:/data/rovere/HGCAL/study/CMSSW_9_4_0/src/SimGeneral/Debugging/test/20800.0_FourMuPt1_200+FourMuPt_1_200_pythia8_2023D20_GenSimHLBeamSpotFull+DigiFull_2023D20+RecoFullGlobal_2023D20+HARVESTFullGlobal_2023D20/step2.root' # 'file:/data/rovere/HGCAL/study/CMSSW_9_4_0/src/SimGeneral/Debugging/test/20824.0_TTbar_13+TTbar_13TeV_TuneCUETP8M1_2023D20_GenSimHLBeamSpotFull+DigiFull_2023D20+RecoFullGlobal_2023D20+HARVESTFullGlobal_2023D20/step2.root' # 
'file:/data/rovere/HGCAL/study/CMSSW_9_4_0/src/SimGeneral/Debugging/test/20002.0_SingleElectronPt35+SingleElectronPt35_pythia8_2023D17_GenSimHLBeamSpotFull+DigiFullTrigger_2023D17+RecoFullGlobal_2023D17+HARVESTFullGlobal_2023D17/step2.root' # 'file:/data/rovere/HGCAL/study/CMSSW_9_4_0/src/SimGeneral/Debugging/test/20016.0_SingleGammaPt35Extended+DoubleGammaPt35Extended_pythia8_2023D17_GenSimHLBeamSpotFull+DigiFullTrigger_2023D17+RecoFullGlobal_2023D17+HARVESTFullGlobal_2023D17/step2.root' # 'file:/data/rovere/HGCAL/study/CMSSW_9_4_0/src/SimGeneral/Debugging/test/20088.0_SinglePiPt25Eta1p7_2p7+SinglePiPt25Eta1p7_2p7_2023D17_GenSimHLBeamSpotFull+DigiFullTrigger_2023D17+RecoFullGlobal_2023D17+HARVESTFullGlobal_2023D17/step2.root' 'file:%s'%input_filename ) ) process.load("SimGeneral.Debugging.caloParticleDebugger_cfi") # MessageLogger customizations process.MessageLogger.cerr.enable = False process.MessageLogger.cout.enable = False labels = ['SimTracks', 'SimVertices', 'GenParticles', 'TrackingParticles', 'CaloParticles', 'SimClusters'] messageLogger = dict() for category in labels: main_key = '%sMessageLogger'%(category) category_key = 'CaloParticleDebugger%s'%(category) messageLogger[main_key] = dict( filename = '%s_%s.log' % (input_filename.replace('.root',''), category), threshold = 'INFO', default = dict(limit=0) ) messageLogger[main_key][category_key] = dict(limit=-1) # First create defaults setattr(process.MessageLogger.files, category, dict()) # Then modify them setattr(process.MessageLogger.files, category, messageLogger[main_key]) process.p = cms.Path(process.caloParticleDebugger)
1,748
316
/* ============================================================================== This is an automatically generated GUI class created by the Projucer! Be careful when adding custom code to these files, as only the code within the "//[xyz]" and "//[/xyz]" sections will be retained when the file is loaded and re-saved. Created with Projucer version: 5.3.2 ------------------------------------------------------------------------------ The Projucer is part of the JUCE library. Copyright (c) 2017 - ROLI Ltd. ============================================================================== */ //[Headers] You can add your own extra header files here... #include <fmt/format.h> //[/Headers] #include "new_program_editor.h" //[MiscUserDefs] You can add your own user definitions and misc code here... //[/MiscUserDefs] //============================================================================== New_Program_Editor::New_Program_Editor () { //[Constructor_pre] You can add your own custom stuff here.. //[/Constructor_pre] label.reset (new Label ("new label", TRANS("Program"))); addAndMakeVisible (label.get()); label->setFont (Font (15.0f, Font::plain).withTypefaceStyle ("Regular")); label->setJustificationType (Justification::centredLeft); label->setEditable (false, false, false); label->setColour (Label::textColourId, Colours::aliceblue); label->setColour (TextEditor::textColourId, Colours::black); label->setColour (TextEditor::backgroundColourId, Colour (0x00000000)); label->setBounds (8, 48, 64, 24); label2.reset (new Label ("new label", TRANS("Bank"))); addAndMakeVisible (label2.get()); label2->setFont (Font (15.0f, Font::plain).withTypefaceStyle ("Regular")); label2->setJustificationType (Justification::centredLeft); label2->setEditable (false, false, false); label2->setColour (Label::textColourId, Colours::aliceblue); label2->setColour (TextEditor::textColourId, Colours::black); label2->setColour (TextEditor::backgroundColourId, Colour (0x00000000)); label2->setBounds (8, 8, 
64, 24); btn_ok.reset (new TextButton ("new button")); addAndMakeVisible (btn_ok.get()); btn_ok->setButtonText (TRANS("OK")); btn_ok->addListener (this); btn_ok->setBounds (130, 86, 70, 24); btn_cancel.reset (new TextButton ("new button")); addAndMakeVisible (btn_cancel.get()); btn_cancel->setButtonText (TRANS("Cancel")); btn_cancel->addListener (this); btn_cancel->setBounds (218, 86, 70, 24); cb_pgm_kind.reset (new ComboBox ("new combo box")); addAndMakeVisible (cb_pgm_kind.get()); cb_pgm_kind->setEditableText (false); cb_pgm_kind->setJustificationType (Justification::centredLeft); cb_pgm_kind->setTextWhenNothingSelected (String()); cb_pgm_kind->setTextWhenNoChoicesAvailable (TRANS("(no choices)")); cb_pgm_kind->addListener (this); cb_pgm_kind->setBounds (96, 48, 144, 24); edt_pgm_num.reset (new TextEditor ("new text editor")); addAndMakeVisible (edt_pgm_num.get()); edt_pgm_num->setMultiLine (false); edt_pgm_num->setReturnKeyStartsNewLine (false); edt_pgm_num->setReadOnly (false); edt_pgm_num->setScrollbarsShown (true); edt_pgm_num->setCaretVisible (true); edt_pgm_num->setPopupMenuEnabled (true); edt_pgm_num->setText (TRANS("000")); edt_pgm_num->setBounds (248, 48, 40, 24); edt_bank_msb.reset (new TextEditor ("new text editor")); addAndMakeVisible (edt_bank_msb.get()); edt_bank_msb->setMultiLine (false); edt_bank_msb->setReturnKeyStartsNewLine (false); edt_bank_msb->setReadOnly (false); edt_bank_msb->setScrollbarsShown (true); edt_bank_msb->setCaretVisible (true); edt_bank_msb->setPopupMenuEnabled (true); edt_bank_msb->setText (TRANS("000")); edt_bank_msb->setBounds (96, 8, 40, 24); edt_bank_lsb.reset (new TextEditor ("new text editor")); addAndMakeVisible (edt_bank_lsb.get()); edt_bank_lsb->setMultiLine (false); edt_bank_lsb->setReturnKeyStartsNewLine (false); edt_bank_lsb->setReadOnly (false); edt_bank_lsb->setScrollbarsShown (true); edt_bank_lsb->setCaretVisible (true); edt_bank_lsb->setPopupMenuEnabled (true); edt_bank_lsb->setText (TRANS("000")); 
edt_bank_lsb->setBounds (152, 8, 40, 24); label3.reset (new Label ("new label", TRANS(":"))); addAndMakeVisible (label3.get()); label3->setFont (Font (15.0f, Font::plain).withTypefaceStyle ("Regular")); label3->setJustificationType (Justification::centredLeft); label3->setEditable (false, false, false); label3->setColour (Label::textColourId, Colours::aliceblue); label3->setColour (TextEditor::textColourId, Colours::black); label3->setColour (TextEditor::backgroundColourId, Colour (0x00000000)); label3->setBounds (136, 8, 16, 24); //[UserPreSize] #if JUCE_MAC { Rectangle<int> bounds_ok = btn_ok->getBounds(); Rectangle<int> bounds_cancel = btn_cancel->getBounds(); btn_ok->setBounds(bounds_cancel); btn_cancel->setBounds(bounds_ok); } #endif edt_bank_msb->setJustification(Justification::centred); edt_bank_lsb->setJustification(Justification::centred); edt_pgm_num->setJustification(Justification::centred); cb_pgm_kind->addItem("Melodic", 1); cb_pgm_kind->addItem("Percussive", 2); cb_pgm_kind->setSelectedId(1); //[/UserPreSize] setSize (300, 120); //[Constructor] You can add your own custom stuff here.. //[/Constructor] } New_Program_Editor::~New_Program_Editor() { //[Destructor_pre]. You can add your own custom destruction code here.. //[/Destructor_pre] label = nullptr; label2 = nullptr; btn_ok = nullptr; btn_cancel = nullptr; cb_pgm_kind = nullptr; edt_pgm_num = nullptr; edt_bank_msb = nullptr; edt_bank_lsb = nullptr; label3 = nullptr; //[Destructor]. You can add your own custom destruction code here.. //[/Destructor] } //============================================================================== void New_Program_Editor::paint (Graphics& g) { //[UserPrePaint] Add your own custom painting code here.. //[/UserPrePaint] g.fillAll (Colour (0xff323e44)); //[UserPaint] Add your own custom painting code here.. //[/UserPaint] } void New_Program_Editor::resized() { //[UserPreResize] Add your own custom resize code here.. 
//[/UserPreResize] //[UserResized] Add your own custom resize handling here.. //[/UserResized] } void New_Program_Editor::buttonClicked (Button* buttonThatWasClicked) { //[UserbuttonClicked_Pre] //[/UserbuttonClicked_Pre] if (buttonThatWasClicked == btn_ok.get()) { //[UserButtonCode_btn_ok] -- add your button handler code here.. if (on_ok) { auto to_uint7 = [](const char *str) -> unsigned { unsigned result, count; return (sscanf(str, "%u%n", &result, &count) == 1 && (result < 128) && count == strlen(str)) ? result : ~0u; }; unsigned msb = to_uint7(edt_bank_msb->getText().toRawUTF8()); unsigned lsb = to_uint7(edt_bank_lsb->getText().toRawUTF8()); unsigned pgm = to_uint7(edt_pgm_num->getText().toRawUTF8()); if (msb == ~0u || lsb == ~0u || pgm == ~0u) AlertWindow::showMessageBox( AlertWindow::WarningIcon, "Invalid value", "Identifiers must be integers between 0 and 127."); else { Result result; result.bank = Bank_Id(msb, lsb, cb_pgm_kind->getSelectedId() - 1); result.pgm = pgm; on_ok(result); } } //[/UserButtonCode_btn_ok] } else if (buttonThatWasClicked == btn_cancel.get()) { //[UserButtonCode_btn_cancel] -- add your button handler code here.. if (on_cancel) on_cancel(); //[/UserButtonCode_btn_cancel] } //[UserbuttonClicked_Post] //[/UserbuttonClicked_Post] } void New_Program_Editor::comboBoxChanged (ComboBox* comboBoxThatHasChanged) { //[UsercomboBoxChanged_Pre] //[/UsercomboBoxChanged_Pre] if (comboBoxThatHasChanged == cb_pgm_kind.get()) { //[UserComboBoxCode_cb_pgm_kind] -- add your combo box handling code here.. //[/UserComboBoxCode_cb_pgm_kind] } //[UsercomboBoxChanged_Post] //[/UsercomboBoxChanged_Post] } //[MiscUserCode] You can add your own definitions of your custom methods or any other code here... void New_Program_Editor::set_current(const Bank_Id &id, unsigned pgm) { cb_pgm_kind->setSelectedId(id.percussive ? 
2 : 1); edt_pgm_num->setText(fmt::format("{:03d}", pgm)); edt_bank_msb->setText(fmt::format("{:03d}", id.msb)); edt_bank_lsb->setText(fmt::format("{:03d}", id.lsb)); } //[/MiscUserCode] //============================================================================== #if 0 /* -- Projucer information section -- This is where the Projucer stores the metadata that describe this GUI layout, so make changes in here at your peril! BEGIN_JUCER_METADATA <JUCER_COMPONENT documentType="Component" className="New_Program_Editor" componentName="" parentClasses="public Component" constructorParams="" variableInitialisers="" snapPixels="8" snapActive="1" snapShown="1" overlayOpacity="0.33" fixedSize="1" initialWidth="300" initialHeight="120"> <BACKGROUND backgroundColour="ff323e44"/> <LABEL name="new label" id="160fefc52427eaeb" memberName="label" virtualName="" explicitFocusOrder="0" pos="8 48 64 24" textCol="fff0f8ff" edTextCol="ff000000" edBkgCol="0" labelText="Program" editableSingleClick="0" editableDoubleClick="0" focusDiscardsChanges="0" fontname="Default font" fontsize="15.0" kerning="0.0" bold="0" italic="0" justification="33"/> <LABEL name="new label" id="3165a7da9bc7b3e" memberName="label2" virtualName="" explicitFocusOrder="0" pos="8 8 64 24" textCol="fff0f8ff" edTextCol="ff000000" edBkgCol="0" labelText="Bank" editableSingleClick="0" editableDoubleClick="0" focusDiscardsChanges="0" fontname="Default font" fontsize="15.0" kerning="0.0" bold="0" italic="0" justification="33"/> <TEXTBUTTON name="new button" id="a00526bae3e43a6a" memberName="btn_ok" virtualName="" explicitFocusOrder="0" pos="130 86 70 24" buttonText="OK" connectedEdges="0" needsCallback="1" radioGroupId="0"/> <TEXTBUTTON name="new button" id="d5f3c6d4a0271367" memberName="btn_cancel" virtualName="" explicitFocusOrder="0" pos="218 86 70 24" buttonText="Cancel" connectedEdges="0" needsCallback="1" radioGroupId="0"/> <COMBOBOX name="new combo box" id="8578b227b06134ff" memberName="cb_pgm_kind" virtualName="" 
explicitFocusOrder="0" pos="96 48 144 24" editable="0" layout="33" items="" textWhenNonSelected="" textWhenNoItems="(no choices)"/> <TEXTEDITOR name="new text editor" id="f55e80ae90a127d0" memberName="edt_pgm_num" virtualName="" explicitFocusOrder="0" pos="248 48 40 24" initialText="000" multiline="0" retKeyStartsLine="0" readonly="0" scrollbars="1" caret="1" popupmenu="1"/> <TEXTEDITOR name="new text editor" id="b0ab631d0df87485" memberName="edt_bank_msb" virtualName="" explicitFocusOrder="0" pos="96 8 40 24" initialText="000" multiline="0" retKeyStartsLine="0" readonly="0" scrollbars="1" caret="1" popupmenu="1"/> <TEXTEDITOR name="new text editor" id="4d0c7f4509556565" memberName="edt_bank_lsb" virtualName="" explicitFocusOrder="0" pos="152 8 40 24" initialText="000" multiline="0" retKeyStartsLine="0" readonly="0" scrollbars="1" caret="1" popupmenu="1"/> <LABEL name="new label" id="f12ad2cea8128443" memberName="label3" virtualName="" explicitFocusOrder="0" pos="136 8 16 24" textCol="fff0f8ff" edTextCol="ff000000" edBkgCol="0" labelText=":" editableSingleClick="0" editableDoubleClick="0" focusDiscardsChanges="0" fontname="Default font" fontsize="15.0" kerning="0.0" bold="0" italic="0" justification="33"/> </JUCER_COMPONENT> END_JUCER_METADATA */ #endif //[EndFile] You can add extra defines here... //[/EndFile]
5,557
1,821
<reponame>hangqiu/pixie<gh_stars>1000+ /* * Copyright 2018- The Pixie Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ #pragma once #include <farmhash.h> #include <cstdint> #include "src/shared/types/types.h" namespace px { namespace types { namespace utils { // PL_CARNOT_UPDATE_FOR_NEW_TYPES template <typename T> struct hash {}; template <> struct hash<BoolValue> { uint64_t operator()(BoolValue val) { return static_cast<uint64_t>(val.val); } }; template <> struct hash<Int64Value> { uint64_t operator()(Int64Value val) { return ::util::Hash64(reinterpret_cast<const char*>(&(val.val)), sizeof(int64_t)); } }; template <> struct hash<Float64Value> { uint64_t operator()(Float64Value val) { return ::util::Hash64(reinterpret_cast<const char*>(&(val.val)), sizeof(double)); } }; template <> struct hash<StringValue> { uint64_t operator()(StringValue val) { return ::util::Hash64(val); } }; template <> struct hash<Time64NSValue> { uint64_t operator()(Time64NSValue val) { return ::util::Hash64(reinterpret_cast<const char*>(&(val)), sizeof(Time64NSValue)); } }; } // namespace utils } // namespace types } // namespace px
584
1,379
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # this is modified from SpeechBrain # https://github.com/speechbrain/speechbrain/blob/085be635c07f16d42cd1295045bc46c407f1e15b/speechbrain/lobes/augment.py import math import os from typing import List import numpy as np import paddle import paddle.nn as nn import paddle.nn.functional as F from paddlespeech.s2t.utils.log import Log from paddlespeech.vector.io.dataset import CSVDataset from paddlespeech.vector.io.signal_processing import compute_amplitude from paddlespeech.vector.io.signal_processing import convolve1d from paddlespeech.vector.io.signal_processing import dB_to_amplitude from paddlespeech.vector.io.signal_processing import notch_filter from paddlespeech.vector.io.signal_processing import reverberate logger = Log(__name__).getlog() # TODO: Complete type-hint and doc string. 
class DropFreq(nn.Layer): def __init__( self, drop_freq_low=1e-14, drop_freq_high=1, drop_count_low=1, drop_count_high=2, drop_width=0.05, drop_prob=1, ): super(DropFreq, self).__init__() self.drop_freq_low = drop_freq_low self.drop_freq_high = drop_freq_high self.drop_count_low = drop_count_low self.drop_count_high = drop_count_high self.drop_width = drop_width self.drop_prob = drop_prob def forward(self, waveforms): # Don't drop (return early) 1-`drop_prob` portion of the batches dropped_waveform = waveforms.clone() if paddle.rand([1]) > self.drop_prob: return dropped_waveform # Add channels dimension if len(waveforms.shape) == 2: dropped_waveform = dropped_waveform.unsqueeze(-1) # Pick number of frequencies to drop drop_count = paddle.randint( low=self.drop_count_low, high=self.drop_count_high + 1, shape=[1]) # Pick a frequency to drop drop_range = self.drop_freq_high - self.drop_freq_low drop_frequency = ( paddle.rand([drop_count]) * drop_range + self.drop_freq_low) # Filter parameters filter_length = 101 pad = filter_length // 2 # Start with delta function drop_filter = paddle.zeros([1, filter_length, 1]) drop_filter[0, pad, 0] = 1 # Subtract each frequency for frequency in drop_frequency: notch_kernel = notch_filter(frequency, filter_length, self.drop_width) drop_filter = convolve1d(drop_filter, notch_kernel, pad) # Apply filter dropped_waveform = convolve1d(dropped_waveform, drop_filter, pad) # Remove channels dimension if added return dropped_waveform.squeeze(-1) class DropChunk(nn.Layer): def __init__( self, drop_length_low=100, drop_length_high=1000, drop_count_low=1, drop_count_high=10, drop_start=0, drop_end=None, drop_prob=1, noise_factor=0.0, ): super(DropChunk, self).__init__() self.drop_length_low = drop_length_low self.drop_length_high = drop_length_high self.drop_count_low = drop_count_low self.drop_count_high = drop_count_high self.drop_start = drop_start self.drop_end = drop_end self.drop_prob = drop_prob self.noise_factor = noise_factor # 
Validate low < high if drop_length_low > drop_length_high: raise ValueError("Low limit must not be more than high limit") if drop_count_low > drop_count_high: raise ValueError("Low limit must not be more than high limit") # Make sure the length doesn't exceed end - start if drop_end is not None and drop_end >= 0: if drop_start > drop_end: raise ValueError("Low limit must not be more than high limit") drop_range = drop_end - drop_start self.drop_length_low = min(drop_length_low, drop_range) self.drop_length_high = min(drop_length_high, drop_range) def forward(self, waveforms, lengths): # Reading input list lengths = (lengths * waveforms.shape[1]).astype('int64') batch_size = waveforms.shape[0] dropped_waveform = waveforms.clone() # Don't drop (return early) 1-`drop_prob` portion of the batches if paddle.rand([1]) > self.drop_prob: return dropped_waveform # Store original amplitude for computing white noise amplitude clean_amplitude = compute_amplitude(waveforms, lengths.unsqueeze(1)) # Pick a number of times to drop drop_times = paddle.randint( low=self.drop_count_low, high=self.drop_count_high + 1, shape=[batch_size], ) # Iterate batch to set mask for i in range(batch_size): if drop_times[i] == 0: continue # Pick lengths length = paddle.randint( low=self.drop_length_low, high=self.drop_length_high + 1, shape=[drop_times[i]], ) # Compute range of starting locations start_min = self.drop_start if start_min < 0: start_min += lengths[i] start_max = self.drop_end if start_max is None: start_max = lengths[i] if start_max < 0: start_max += lengths[i] start_max = max(0, start_max - length.max()) # Pick starting locations start = paddle.randint( low=start_min, high=start_max + 1, shape=[drop_times[i]], ) end = start + length # Update waveform if not self.noise_factor: for j in range(drop_times[i]): if start[j] < end[j]: dropped_waveform[i, start[j]:end[j]] = 0.0 else: # Uniform distribution of -2 to +2 * avg amplitude should # preserve the average for normalization 
noise_max = 2 * clean_amplitude[i] * self.noise_factor for j in range(drop_times[i]): # zero-center the noise distribution noise_vec = paddle.rand([length[j]], dtype='float32') noise_vec = 2 * noise_max * noise_vec - noise_max dropped_waveform[i, int(start[j]):int(end[j])] = noise_vec return dropped_waveform class Resample(nn.Layer): def __init__( self, orig_freq=16000, new_freq=16000, lowpass_filter_width=6, ): super(Resample, self).__init__() self.orig_freq = orig_freq self.new_freq = new_freq self.lowpass_filter_width = lowpass_filter_width # Compute rate for striding self._compute_strides() assert self.orig_freq % self.conv_stride == 0 assert self.new_freq % self.conv_transpose_stride == 0 def _compute_strides(self): # Compute new unit based on ratio of in/out frequencies base_freq = math.gcd(self.orig_freq, self.new_freq) input_samples_in_unit = self.orig_freq // base_freq self.output_samples = self.new_freq // base_freq # Store the appropriate stride based on the new units self.conv_stride = input_samples_in_unit self.conv_transpose_stride = self.output_samples def forward(self, waveforms): if not hasattr(self, "first_indices"): self._indices_and_weights(waveforms) # Don't do anything if the frequencies are the same if self.orig_freq == self.new_freq: return waveforms unsqueezed = False if len(waveforms.shape) == 2: waveforms = waveforms.unsqueeze(1) unsqueezed = True elif len(waveforms.shape) == 3: waveforms = waveforms.transpose([0, 2, 1]) else: raise ValueError("Input must be 2 or 3 dimensions") # Do resampling resampled_waveform = self._perform_resample(waveforms) if unsqueezed: resampled_waveform = resampled_waveform.squeeze(1) else: resampled_waveform = resampled_waveform.transpose([0, 2, 1]) return resampled_waveform def _perform_resample(self, waveforms): # Compute output size and initialize batch_size, num_channels, wave_len = waveforms.shape window_size = self.weights.shape[1] tot_output_samp = self._output_samples(wave_len) resampled_waveform = 
paddle.zeros((batch_size, num_channels, tot_output_samp)) # eye size: (num_channels, num_channels, 1) eye = paddle.eye(num_channels).unsqueeze(2) # Iterate over the phases in the polyphase filter for i in range(self.first_indices.shape[0]): wave_to_conv = waveforms first_index = int(self.first_indices[i].item()) if first_index >= 0: # trim the signal as the filter will not be applied # before the first_index wave_to_conv = wave_to_conv[:, :, first_index:] # pad the right of the signal to allow partial convolutions # meaning compute values for partial windows (e.g. end of the # window is outside the signal length) max_index = (tot_output_samp - 1) // self.output_samples end_index = max_index * self.conv_stride + window_size current_wave_len = wave_len - first_index right_padding = max(0, end_index + 1 - current_wave_len) left_padding = max(0, -first_index) wave_to_conv = paddle.nn.functional.pad( wave_to_conv, [left_padding, right_padding], data_format='NCL') conv_wave = paddle.nn.functional.conv1d( x=wave_to_conv, # weight=self.weights[i].repeat(num_channels, 1, 1), weight=self.weights[i].expand((num_channels, 1, -1)), stride=self.conv_stride, groups=num_channels, ) # we want conv_wave[:, i] to be at # output[:, i + n*conv_transpose_stride] dilated_conv_wave = paddle.nn.functional.conv1d_transpose( conv_wave, eye, stride=self.conv_transpose_stride) # pad dilated_conv_wave so it reaches the output length if needed. 
left_padding = i previous_padding = left_padding + dilated_conv_wave.shape[-1] right_padding = max(0, tot_output_samp - previous_padding) dilated_conv_wave = paddle.nn.functional.pad( dilated_conv_wave, [left_padding, right_padding], data_format='NCL') dilated_conv_wave = dilated_conv_wave[:, :, :tot_output_samp] resampled_waveform += dilated_conv_wave return resampled_waveform def _output_samples(self, input_num_samp): samp_in = int(self.orig_freq) samp_out = int(self.new_freq) tick_freq = abs(samp_in * samp_out) // math.gcd(samp_in, samp_out) ticks_per_input_period = tick_freq // samp_in # work out the number of ticks in the time interval # [ 0, input_num_samp/samp_in ). interval_length = input_num_samp * ticks_per_input_period if interval_length <= 0: return 0 ticks_per_output_period = tick_freq // samp_out # Get the last output-sample in the closed interval, # i.e. replacing [ ) with [ ]. Note: integer division rounds down. # See http://en.wikipedia.org/wiki/Interval_(mathematics) for an # explanation of the notation. last_output_samp = interval_length // ticks_per_output_period # We need the last output-sample in the open interval, so if it # takes us to the end of the interval exactly, subtract one. if last_output_samp * ticks_per_output_period == interval_length: last_output_samp -= 1 # First output-sample index is zero, so the number of output samples # is the last output-sample plus one. 
num_output_samp = last_output_samp + 1 return num_output_samp def _indices_and_weights(self, waveforms): # Lowpass filter frequency depends on smaller of two frequencies min_freq = min(self.orig_freq, self.new_freq) lowpass_cutoff = 0.99 * 0.5 * min_freq assert lowpass_cutoff * 2 <= min_freq window_width = self.lowpass_filter_width / (2.0 * lowpass_cutoff) assert lowpass_cutoff < min(self.orig_freq, self.new_freq) / 2 output_t = paddle.arange(start=0.0, end=self.output_samples) output_t /= self.new_freq min_t = output_t - window_width max_t = output_t + window_width min_input_index = paddle.ceil(min_t * self.orig_freq) max_input_index = paddle.floor(max_t * self.orig_freq) num_indices = max_input_index - min_input_index + 1 max_weight_width = num_indices.max() j = paddle.arange(max_weight_width, dtype='float32') input_index = min_input_index.unsqueeze(1) + j.unsqueeze(0) delta_t = (input_index / self.orig_freq) - output_t.unsqueeze(1) weights = paddle.zeros_like(delta_t) inside_window_indices = delta_t.abs().less_than( paddle.to_tensor(window_width)) # raised-cosine (Hanning) window with width `window_width` weights[inside_window_indices] = 0.5 * (1 + paddle.cos( 2 * math.pi * lowpass_cutoff / self.lowpass_filter_width * delta_t.masked_select(inside_window_indices))) t_eq_zero_indices = delta_t.equal(paddle.zeros_like(delta_t)) t_not_eq_zero_indices = delta_t.not_equal(paddle.zeros_like(delta_t)) # sinc filter function weights = paddle.where( t_not_eq_zero_indices, weights * paddle.sin(2 * math.pi * lowpass_cutoff * delta_t) / (math.pi * delta_t), weights) # limit of the function at t = 0 weights = paddle.where(t_eq_zero_indices, weights * 2 * lowpass_cutoff, weights) # size (output_samples, max_weight_width) weights /= self.orig_freq self.first_indices = min_input_index self.weights = weights class SpeedPerturb(nn.Layer): def __init__( self, orig_freq, speeds=[90, 100, 110], perturb_prob=1.0, ): super(SpeedPerturb, self).__init__() self.orig_freq = orig_freq 
self.speeds = speeds self.perturb_prob = perturb_prob # Initialize index of perturbation self.samp_index = 0 # Initialize resamplers self.resamplers = [] for speed in self.speeds: config = { "orig_freq": self.orig_freq, "new_freq": self.orig_freq * speed // 100, } self.resamplers.append(Resample(**config)) def forward(self, waveform): # Don't perturb (return early) 1-`perturb_prob` portion of the batches if paddle.rand([1]) > self.perturb_prob: return waveform.clone() # Perform a random perturbation self.samp_index = paddle.randint(len(self.speeds), shape=[1]).item() perturbed_waveform = self.resamplers[self.samp_index](waveform) return perturbed_waveform class AddNoise(nn.Layer): def __init__( self, noise_dataset=None, # None for white noise num_workers=0, snr_low=0, snr_high=0, mix_prob=1.0, start_index=None, normalize=False, ): super(AddNoise, self).__init__() self.num_workers = num_workers self.snr_low = snr_low self.snr_high = snr_high self.mix_prob = mix_prob self.start_index = start_index self.normalize = normalize self.noise_dataset = noise_dataset self.noise_dataloader = None def forward(self, waveforms, lengths=None): if lengths is None: lengths = paddle.ones([len(waveforms)]) # Copy clean waveform to initialize noisy waveform noisy_waveform = waveforms.clone() lengths = (lengths * waveforms.shape[1]).astype('int64').unsqueeze(1) # Don't add noise (return early) 1-`mix_prob` portion of the batches if paddle.rand([1]) > self.mix_prob: return noisy_waveform # Compute the average amplitude of the clean waveforms clean_amplitude = compute_amplitude(waveforms, lengths) # Pick an SNR and use it to compute the mixture amplitude factors SNR = paddle.rand((len(waveforms), 1)) SNR = SNR * (self.snr_high - self.snr_low) + self.snr_low noise_amplitude_factor = 1 / (dB_to_amplitude(SNR) + 1) new_noise_amplitude = noise_amplitude_factor * clean_amplitude # Scale clean signal appropriately noisy_waveform *= 1 - noise_amplitude_factor # Loop through clean samples and 
create mixture if self.noise_dataset is None: white_noise = paddle.normal(shape=waveforms.shape) noisy_waveform += new_noise_amplitude * white_noise else: tensor_length = waveforms.shape[1] noise_waveform, noise_length = self._load_noise( lengths, tensor_length, ) # Rescale and add noise_amplitude = compute_amplitude(noise_waveform, noise_length) noise_waveform *= new_noise_amplitude / (noise_amplitude + 1e-14) noisy_waveform += noise_waveform # Normalizing to prevent clipping if self.normalize: abs_max, _ = paddle.max( paddle.abs(noisy_waveform), axis=1, keepdim=True) noisy_waveform = noisy_waveform / abs_max.clip(min=1.0) return noisy_waveform def _load_noise(self, lengths, max_length): """ Load a batch of noises args lengths(Paddle.Tensor): Num samples of waveforms with shape (N, 1). max_length(int): Width of a batch. """ lengths = lengths.squeeze(1) batch_size = len(lengths) # Load a noise batch if self.noise_dataloader is None: def noise_collate_fn(batch): def pad(x, target_length, mode='constant', **kwargs): x = np.asarray(x) w = target_length - x.shape[0] assert w >= 0, f'Target length {target_length} is less than origin length {x.shape[0]}' return np.pad(x, [0, w], mode=mode, **kwargs) ids = [item['utt_id'] for item in batch] lengths = np.asarray([item['feat'].shape[0] for item in batch]) waveforms = list( map(lambda x: pad(x, max(max_length, lengths.max().item())), [item['feat'] for item in batch])) waveforms = np.stack(waveforms) return {'ids': ids, 'feats': waveforms, 'lengths': lengths} # Create noise data loader. 
self.noise_dataloader = paddle.io.DataLoader( self.noise_dataset, batch_size=batch_size, shuffle=True, num_workers=self.num_workers, collate_fn=noise_collate_fn, return_list=True, ) self.noise_data = iter(self.noise_dataloader) noise_batch, noise_len = self._load_noise_batch_of_size(batch_size) # Select a random starting location in the waveform start_index = self.start_index if self.start_index is None: start_index = 0 max_chop = (noise_len - lengths).min().clip(min=1) start_index = paddle.randint(high=max_chop, shape=[1]) # Truncate noise_batch to max_length noise_batch = noise_batch[:, start_index:start_index + max_length] noise_len = (noise_len - start_index).clip(max=max_length).unsqueeze(1) return noise_batch, noise_len def _load_noise_batch_of_size(self, batch_size): """Concatenate noise batches, then chop to correct size""" noise_batch, noise_lens = self._load_noise_batch() # Expand while len(noise_batch) < batch_size: noise_batch = paddle.concat((noise_batch, noise_batch)) noise_lens = paddle.concat((noise_lens, noise_lens)) # Contract if len(noise_batch) > batch_size: noise_batch = noise_batch[:batch_size] noise_lens = noise_lens[:batch_size] return noise_batch, noise_lens def _load_noise_batch(self): """Load a batch of noises, restarting iteration if necessary.""" try: batch = next(self.noise_data) except StopIteration: self.noise_data = iter(self.noise_dataloader) batch = next(self.noise_data) noises, lens = batch['feats'], batch['lengths'] return noises, lens class AddReverb(nn.Layer): def __init__( self, rir_dataset, reverb_prob=1.0, rir_scale_factor=1.0, num_workers=0, ): super(AddReverb, self).__init__() self.rir_dataset = rir_dataset self.reverb_prob = reverb_prob self.rir_scale_factor = rir_scale_factor # Create rir data loader. 
def rir_collate_fn(batch): def pad(x, target_length, mode='constant', **kwargs): x = np.asarray(x) w = target_length - x.shape[0] assert w >= 0, f'Target length {target_length} is less than origin length {x.shape[0]}' return np.pad(x, [0, w], mode=mode, **kwargs) ids = [item['utt_id'] for item in batch] lengths = np.asarray([item['feat'].shape[0] for item in batch]) waveforms = list( map(lambda x: pad(x, lengths.max().item()), [item['feat'] for item in batch])) waveforms = np.stack(waveforms) return {'ids': ids, 'feats': waveforms, 'lengths': lengths} self.rir_dataloader = paddle.io.DataLoader( self.rir_dataset, collate_fn=rir_collate_fn, num_workers=num_workers, shuffle=True, return_list=True, ) self.rir_data = iter(self.rir_dataloader) def forward(self, waveforms, lengths=None): """ Arguments --------- waveforms : tensor Shape should be `[batch, time]` or `[batch, time, channels]`. lengths : tensor Shape should be a single dimension, `[batch]`. Returns ------- Tensor of shape `[batch, time]` or `[batch, time, channels]`. 
""" if lengths is None: lengths = paddle.ones([len(waveforms)]) # Don't add reverb (return early) 1-`reverb_prob` portion of the time if paddle.rand([1]) > self.reverb_prob: return waveforms.clone() # Add channels dimension if necessary channel_added = False if len(waveforms.shape) == 2: waveforms = waveforms.unsqueeze(-1) channel_added = True # Load and prepare RIR rir_waveform = self._load_rir() # Compress or dilate RIR if self.rir_scale_factor != 1: rir_waveform = F.interpolate( rir_waveform.transpose([0, 2, 1]), scale_factor=self.rir_scale_factor, mode="linear", align_corners=False, data_format='NCW', ) # (N, C, L) -> (N, L, C) rir_waveform = rir_waveform.transpose([0, 2, 1]) rev_waveform = reverberate( waveforms, rir_waveform, self.rir_dataset.sample_rate, rescale_amp="avg") # Remove channels dimension if added if channel_added: return rev_waveform.squeeze(-1) return rev_waveform def _load_rir(self): try: batch = next(self.rir_data) except StopIteration: self.rir_data = iter(self.rir_dataloader) batch = next(self.rir_data) rir_waveform = batch['feats'] # Make sure RIR has correct channels if len(rir_waveform.shape) == 2: rir_waveform = rir_waveform.unsqueeze(-1) return rir_waveform class AddBabble(nn.Layer): def __init__( self, speaker_count=3, snr_low=0, snr_high=0, mix_prob=1, ): super(AddBabble, self).__init__() self.speaker_count = speaker_count self.snr_low = snr_low self.snr_high = snr_high self.mix_prob = mix_prob def forward(self, waveforms, lengths=None): if lengths is None: lengths = paddle.ones([len(waveforms)]) babbled_waveform = waveforms.clone() lengths = (lengths * waveforms.shape[1]).unsqueeze(1) batch_size = len(waveforms) # Don't mix (return early) 1-`mix_prob` portion of the batches if paddle.rand([1]) > self.mix_prob: return babbled_waveform # Pick an SNR and use it to compute the mixture amplitude factors clean_amplitude = compute_amplitude(waveforms, lengths) SNR = paddle.rand((batch_size, 1)) SNR = SNR * (self.snr_high - self.snr_low) + 
self.snr_low noise_amplitude_factor = 1 / (dB_to_amplitude(SNR) + 1) new_noise_amplitude = noise_amplitude_factor * clean_amplitude # Scale clean signal appropriately babbled_waveform *= 1 - noise_amplitude_factor # For each speaker in the mixture, roll and add babble_waveform = waveforms.roll((1, ), axis=0) babble_len = lengths.roll((1, ), axis=0) for i in range(1, self.speaker_count): babble_waveform += waveforms.roll((1 + i, ), axis=0) babble_len = paddle.concat( [babble_len, babble_len.roll((1, ), axis=0)], axis=-1).max( axis=-1, keepdim=True) # Rescale and add to mixture babble_amplitude = compute_amplitude(babble_waveform, babble_len) babble_waveform *= new_noise_amplitude / (babble_amplitude + 1e-14) babbled_waveform += babble_waveform return babbled_waveform class TimeDomainSpecAugment(nn.Layer): def __init__( self, perturb_prob=1.0, drop_freq_prob=1.0, drop_chunk_prob=1.0, speeds=[95, 100, 105], sample_rate=16000, drop_freq_count_low=0, drop_freq_count_high=3, drop_chunk_count_low=0, drop_chunk_count_high=5, drop_chunk_length_low=1000, drop_chunk_length_high=2000, drop_chunk_noise_factor=0, ): super(TimeDomainSpecAugment, self).__init__() self.speed_perturb = SpeedPerturb( perturb_prob=perturb_prob, orig_freq=sample_rate, speeds=speeds, ) self.drop_freq = DropFreq( drop_prob=drop_freq_prob, drop_count_low=drop_freq_count_low, drop_count_high=drop_freq_count_high, ) self.drop_chunk = DropChunk( drop_prob=drop_chunk_prob, drop_count_low=drop_chunk_count_low, drop_count_high=drop_chunk_count_high, drop_length_low=drop_chunk_length_low, drop_length_high=drop_chunk_length_high, noise_factor=drop_chunk_noise_factor, ) def forward(self, waveforms, lengths=None): if lengths is None: lengths = paddle.ones([len(waveforms)]) with paddle.no_grad(): # Augmentation waveforms = self.speed_perturb(waveforms) waveforms = self.drop_freq(waveforms) waveforms = self.drop_chunk(waveforms, lengths) return waveforms class EnvCorrupt(nn.Layer): def __init__( self, 
reverb_prob=1.0, babble_prob=1.0, noise_prob=1.0, rir_dataset=None, noise_dataset=None, num_workers=0, babble_speaker_count=0, babble_snr_low=0, babble_snr_high=0, noise_snr_low=0, noise_snr_high=0, rir_scale_factor=1.0, ): super(EnvCorrupt, self).__init__() # Initialize corrupters if rir_dataset is not None and reverb_prob > 0.0: self.add_reverb = AddReverb( rir_dataset=rir_dataset, num_workers=num_workers, reverb_prob=reverb_prob, rir_scale_factor=rir_scale_factor, ) if babble_speaker_count > 0 and babble_prob > 0.0: self.add_babble = AddBabble( speaker_count=babble_speaker_count, snr_low=babble_snr_low, snr_high=babble_snr_high, mix_prob=babble_prob, ) if noise_dataset is not None and noise_prob > 0.0: self.add_noise = AddNoise( noise_dataset=noise_dataset, num_workers=num_workers, snr_low=noise_snr_low, snr_high=noise_snr_high, mix_prob=noise_prob, ) def forward(self, waveforms, lengths=None): if lengths is None: lengths = paddle.ones([len(waveforms)]) # Augmentation with paddle.no_grad(): if hasattr(self, "add_reverb"): try: waveforms = self.add_reverb(waveforms, lengths) except Exception: pass if hasattr(self, "add_babble"): waveforms = self.add_babble(waveforms, lengths) if hasattr(self, "add_noise"): waveforms = self.add_noise(waveforms, lengths) return waveforms def build_augment_pipeline(target_dir=None) -> List[paddle.nn.Layer]: """build augment pipeline Note: this pipeline cannot be used in the paddle.DataLoader Returns: List[paddle.nn.Layer]: all augment process """ logger.info("start to build the augment pipeline") noise_dataset = CSVDataset(csv_path=os.path.join(target_dir, "rir_noise/csv/noise.csv")) rir_dataset = CSVDataset(csv_path=os.path.join(target_dir, "rir_noise/csv/rir.csv")) wavedrop = TimeDomainSpecAugment( sample_rate=16000, speeds=[100], ) speed_perturb = TimeDomainSpecAugment( sample_rate=16000, speeds=[95, 100, 105], ) add_noise = EnvCorrupt( noise_dataset=noise_dataset, reverb_prob=0.0, noise_prob=1.0, noise_snr_low=0, 
noise_snr_high=15, rir_scale_factor=1.0, ) add_rev = EnvCorrupt( rir_dataset=rir_dataset, reverb_prob=1.0, noise_prob=0.0, rir_scale_factor=1.0, ) add_rev_noise = EnvCorrupt( noise_dataset=noise_dataset, rir_dataset=rir_dataset, reverb_prob=1.0, noise_prob=1.0, noise_snr_low=0, noise_snr_high=15, rir_scale_factor=1.0, ) return [wavedrop, speed_perturb, add_noise, add_rev, add_rev_noise] def waveform_augment(waveforms: paddle.Tensor, augment_pipeline: List[paddle.nn.Layer]) -> paddle.Tensor: """process the augment pipeline and return all the waveforms Args: waveforms (paddle.Tensor): original batch waveform augment_pipeline (List[paddle.nn.Layer]): agument pipeline process Returns: paddle.Tensor: all the audio waveform including the original waveform and augmented waveform """ # stage 0: store the original waveforms waveforms_aug_list = [waveforms] # augment the original batch waveform for aug in augment_pipeline: # stage 1: augment the data waveforms_aug = aug(waveforms) # (N, L) if waveforms_aug.shape[1] >= waveforms.shape[1]: # Trunc waveforms_aug = waveforms_aug[:, :waveforms.shape[1]] else: # Pad lengths_to_pad = waveforms.shape[1] - waveforms_aug.shape[1] waveforms_aug = F.pad( waveforms_aug.unsqueeze(-1), [0, lengths_to_pad], data_format='NLC').squeeze(-1) # stage 2: append the augmented waveform into the list waveforms_aug_list.append(waveforms_aug) # get the all the waveforms return paddle.concat(waveforms_aug_list, axis=0)
16,266
388
<gh_stars>100-1000 /* * Copyright 2019 Amazon.com, Inc. or its affiliates. * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef AWS_AUTH_MUTABLE_STATIC_CREDS_PROVIDER_H_ #define AWS_AUTH_MUTABLE_STATIC_CREDS_PROVIDER_H_ #include <aws/core/auth/AWSCredentialsProvider.h> #include <atomic> #include <cstdint> #include <mutex> #include <memory> namespace aws { namespace auth { struct VersionedCredentials { std::uint64_t version_; Aws::Auth::AWSCredentials creds_; VersionedCredentials() : version_(0), creds_("", "", "") {} VersionedCredentials(std::uint64_t version, const std::string& akid, const std::string& sk, const std::string& token); }; // Like basic static creds, but with an atomic set operation class MutableStaticCredentialsProvider : public Aws::Auth::AWSCredentialsProvider { public: MutableStaticCredentialsProvider(const std::string& akid, const std::string& sk, std::string token = ""); void set_credentials(const std::string& akid, const std::string& sk, std::string token = ""); Aws::Auth::AWSCredentials GetAWSCredentials() override; private: std::mutex update_mutex_; std::shared_ptr<VersionedCredentials> creds_; std::atomic<std::uint64_t> version_; }; } //namespace auth } //namespace aws #endif //AWS_AUTH_MUTABLE_STATIC_CREDS_PROVIDER_H_
594
376
<gh_stars>100-1000 package net.zhuoweizhang.boardwalk.model; import java.util.Map; public class MinecraftAssets { public Map<String, MinecraftAssetInfo> objects; }
56
417
//----------------------------------------------------------------------------- // Copyright (c) 2012 GarageGames, LLC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
//----------------------------------------------------------------------------- #ifndef _GUI_INSPECTOR_DYNAMICGROUP_H_ #define _GUI_INSPECTOR_DYNAMICGROUP_H_ #include "gui/editor/inspector/group.h" #include "console/simFieldDictionary.h" class GuiInspectorDynamicGroup : public GuiInspectorGroup { private: typedef GuiInspectorGroup Parent; GuiControl* mAddCtrl; public: DECLARE_CONOBJECT(GuiInspectorDynamicGroup); GuiInspectorDynamicGroup() { mAddCtrl = NULL;/*mNeedScroll=false;*/ }; GuiInspectorDynamicGroup( StringTableEntry groupName, SimObjectPtr<GuiInspector> parent ) : GuiInspectorGroup( groupName, parent) { mAddCtrl = NULL;/*mNeedScroll=false;*/}; //----------------------------------------------------------------------------- // inspectGroup is overridden in GuiInspectorDynamicGroup to inspect an // objects FieldDictionary (dynamic fields) instead of regular persistent // fields. bool inspectGroup(); virtual void updateAllFields(); // For scriptable dynamic field additions void addDynamicField(); // Clear our fields (delete them) void clearFields(); // Find an already existent field by name in the dictionary virtual SimFieldDictionary::Entry* findDynamicFieldInDictionary( StringTableEntry fieldName ); protected: // create our inner controls when we add virtual bool createContent(); }; #endif // _GUI_INSPECTOR_DYNAMICGROUP_H_
713
4,538
#ifndef _RTL_LIB_ROM_H_ #define _RTL_LIB_ROM_H_ #include "memproc.h" #include "strproc.h" #include "diag.h" #endif /* _RTL_LIB_ROM_H_ */
74
416
<reponame>onezens/sdks // // MKScaleView.h // Maps // // Copyright 2017, Apple. All rights reserved. // // System #import <UIKit/UIKit.h> #import <MapKit/MKTypes.h> @class MKMapView; typedef NS_ENUM(NSInteger, MKScaleViewAlignment) { MKScaleViewAlignmentLeading, MKScaleViewAlignmentTrailing, } NS_AVAILABLE_IOS(11_0) __TVOS_AVAILABLE(11_0); NS_ASSUME_NONNULL_BEGIN NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_AVAILABLE(11_0) @interface MKScaleView : UIView + (instancetype)scaleViewWithMapView:(nullable MKMapView *)mapView; @property (nonatomic, nullable, weak) MKMapView *mapView; @property (nonatomic, assign) MKFeatureVisibility scaleVisibility; @property (nonatomic, assign) MKScaleViewAlignment legendAlignment; @end NS_ASSUME_NONNULL_END
299
901
<filename>serialization/src/main/java/com/twitter/serial/serializer/BuilderSerializer.java /* * Copyright 2017 Twitter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.serial.serializer; import com.twitter.serial.object.Builder; import com.twitter.serial.stream.SerializerInput; import com.twitter.serial.util.OptionalFieldException; import com.twitter.serial.util.SerializationException; import com.twitter.serial.util.SerializationUtils; import org.jetbrains.annotations.NotNull; import java.io.EOFException; import java.io.IOException; import java.io.OptionalDataException; /** * A base serializer for a class that has an {@link Builder}. The serialized data delimits the boundaries * of the object and provides basic support for versioning. 
*/ public abstract class BuilderSerializer<T, B extends Builder<T>> extends ObjectSerializer<T> { protected BuilderSerializer() { } protected BuilderSerializer(int versionNumber) { super(versionNumber); } public void deserialize(@NotNull SerializationContext context, @NotNull SerializerInput input, @NotNull B builder) throws IOException, ClassNotFoundException { if (SerializationUtils.readNullIndicator(input)) { return; } final int deserializedVersionNumber = input.readObjectStart(); if (deserializedVersionNumber > mVersionNumber) { throw new SerializationException( "Version number found (" + deserializedVersionNumber + ") is " + "greater than the maximum supported value (" + mVersionNumber + ")"); } deserialize(context, input, builder, deserializedVersionNumber); input.readObjectEnd(); } @NotNull @Override protected final T deserializeObject(@NotNull SerializationContext context, @NotNull SerializerInput input, int versionNumber) throws IOException, ClassNotFoundException { final B builder = createBuilder(); deserialize(context, input, builder, versionNumber); return builder.build(); } private void deserialize(@NotNull SerializationContext context, @NotNull SerializerInput input, @NotNull B builder, int versionNumber) throws IOException, ClassNotFoundException { try { //noinspection BlacklistedMethod deserializeToBuilder(context, input, builder, versionNumber); } catch (OptionalDataException | EOFException | OptionalFieldException ignore) { // This may happen when reading optional fields. The builder should already // contain all the available fields, so just ignore the exception. } } @NotNull protected abstract B createBuilder(); protected abstract void deserializeToBuilder(@NotNull SerializationContext context, @NotNull SerializerInput input, @NotNull B builder, int versionNumber) throws IOException, ClassNotFoundException; }
1,162
14,668
<reponame>chromium/chromium<filename>chrome/browser/ash/login/screens/update_required_screen_browsertest.cc // Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ash/login/screens/update_required_screen.h" #include <memory> #include "base/callback.h" #include "base/callback_helpers.h" #include "base/json/json_writer.h" #include "base/run_loop.h" #include "base/time/default_clock.h" #include "base/time/time.h" #include "chrome/browser/ash/login/login_wizard.h" #include "chrome/browser/ash/login/screens/error_screen.h" #include "chrome/browser/ash/login/test/device_state_mixin.h" #include "chrome/browser/ash/login/test/js_checker.h" #include "chrome/browser/ash/login/test/login_manager_mixin.h" #include "chrome/browser/ash/login/test/network_portal_detector_mixin.h" #include "chrome/browser/ash/login/test/oobe_base_test.h" #include "chrome/browser/ash/login/test/oobe_screen_waiter.h" #include "chrome/browser/ash/login/ui/login_display_host.h" #include "chrome/browser/ash/login/version_updater/version_updater.h" #include "chrome/browser/ash/login/wizard_controller.h" #include "chrome/browser/ash/policy/core/device_policy_builder.h" #include "chrome/browser/ash/policy/core/device_policy_cros_browser_test.h" #include "chrome/browser/ash/policy/handlers/minimum_version_policy_test_helpers.h" #include "chrome/browser/ui/webui/chromeos/login/error_screen_handler.h" #include "chrome/browser/ui/webui/chromeos/login/gaia_screen_handler.h" #include "chrome/browser/ui/webui/chromeos/login/oobe_ui.h" #include "chrome/browser/ui/webui/chromeos/login/update_required_screen_handler.h" #include "chrome/grit/chromium_strings.h" #include "chrome/grit/generated_resources.h" #include "chromeos/dbus/constants/dbus_switches.h" #include "chromeos/dbus/dbus_thread_manager.h" #include "chromeos/dbus/update_engine/fake_update_engine_client.h" #include 
"chromeos/network/network_state_test_helper.h" #include "components/user_manager/user_manager.h" #include "content/public/test/browser_test.h" #include "dbus/object_path.h" #include "third_party/abseil-cpp/absl/types/optional.h" #include "third_party/cros_system_api/dbus/service_constants.h" #include "ui/base/l10n/l10n_util.h" #include "ui/chromeos/devicetype_utils.h" #include "ui/strings/grit/ui_strings.h" namespace ash { namespace { namespace em = ::enterprise_management; const test::UIPath kUpdateRequiredScreen = {"update-required"}; const test::UIPath kUpdateRequiredStep = {"update-required", "update-required-dialog"}; const test::UIPath kUpdateRequiredSubtitle = {"update-required", "update-subtitle"}; const test::UIPath kUpdateNowButton = {"update-required", "update-button"}; const test::UIPath kUpdateProcessStep = {"update-required", "downloadingUpdate"}; const test::UIPath kUpdateRequiredEolDialog = {"update-required", "eolDialog"}; const test::UIPath kEolAdminMessageContainer = {"update-required", "adminMessageContainer"}; const test::UIPath kEolAdminMessage = {"update-required", "adminMessage"}; const test::UIPath kEolDeleteUsersDataMessage = {"update-required", "deleteDataMessageContainer"}; const test::UIPath kEolNoUsersDataMsg = {"update-required", "noUsersDataMessage"}; const test::UIPath kEolDeleteUsersDataLink = {"update-required", "deleteDataLink"}; const test::UIPath kEolDeleteUsersDataConfirmDialog = {"update-required", "confirmationDialog"}; const test::UIPath kEolDeleteUsersDataConfirmButton = {"update-required", "confirmDelete"}; const test::UIPath kEolDeleteUsersDataCancelButton = {"update-required", "cancelDelete"}; const test::UIPath kMeteredNetworkStep = {"update-required", "update-need-permission-dialog"}; const test::UIPath kMeteredNetworkAcceptButton = { "update-required", "cellular-permission-accept-button"}; const test::UIPath kNoNetworkStep = {"update-required", "update-required-no-network-dialog"}; // Elements in downloadingUpdate 
// UI elements nested inside the `downloadingUpdate` element that reflect the
// update-engine driven progress steps.
const test::UIPath kUpdateProcessCheckingStep = {
    "update-required", "downloadingUpdate", "checking-for-updates-dialog"};
const test::UIPath kUpdateProcessUpdatingStep = {
    "update-required", "downloadingUpdate", "updating-dialog"};
const test::UIPath kUpdateProcessCompleteStep = {
    "update-required", "downloadingUpdate", "update-complete-dialog"};
const test::UIPath kCheckingForUpdatesMessage = {
    "update-required", "downloadingUpdate", "checkingForUpdatesMsg"};
const test::UIPath kUpdatingProgress = {"update-required", "downloadingUpdate",
                                        "updating-progress"};

// Fake Shill service paths used to simulate a non-metered (WiFi) and a
// metered (cellular) network.
constexpr char kWifiServicePath[] = "/service/wifi2";
constexpr char kCellularServicePath[] = "/service/cellular1";
// Admin-configured end-of-life message pushed via device policy.
constexpr char kDemoEolMessage[] = "Please return your device.";
// Enrollment domain / FlexOrg manager expected in the screen subtitle.
constexpr char16_t kDomain16[] = u"example.com";
constexpr char kManager[] = "<EMAIL>";
constexpr char16_t kManager16[] = u"<EMAIL>";

// Returns the OobeUI of the active LoginDisplayHost, or nullptr when no host
// exists.
chromeos::OobeUI* GetOobeUI() {
  auto* host = LoginDisplayHost::default_host();
  return host ? host->GetOobeUI() : nullptr;
}

// Shill error callback: quits the surrounding RunLoop and fails the test with
// the Shill error details.
void ErrorCallbackFunction(base::OnceClosure run_loop_quit_closure,
                           const std::string& error_name,
                           const std::string& error_message) {
  std::move(run_loop_quit_closure).Run();
  FAIL() << "Shill Error: " << error_name << " : " << error_message;
}

// Synchronously connects `service_path` through the fake Shill service
// client; blocks until the connect request settles.
void SetConnected(const std::string& service_path) {
  base::RunLoop run_loop;
  ShillServiceClient::Get()->Connect(
      dbus::ObjectPath(service_path), run_loop.QuitWhenIdleClosure(),
      base::BindOnce(&ErrorCallbackFunction, run_loop.QuitClosure()));
  run_loop.Run();
}

// Blocks until the delete-users-data confirmation dialog reports `.open`.
void WaitForConfirmationDialogToOpen() {
  test::OobeJS()
      .CreateWaiter(
          test::GetOobeElementPath({kEolDeleteUsersDataConfirmDialog}) +
          ".open")
      ->Wait();
}

// Blocks until the delete-users-data confirmation dialog reports closed.
void WaitForConfirmationDialogToClose() {
  test::OobeJS()
      .CreateWaiter(
          test::GetOobeElementPath({kEolDeleteUsersDataConfirmDialog}) +
          ".open === false")
      ->Wait();
}

// Fixture that drives the update-required screen with fake Shill networks and
// a fake update engine. Starts with two regular users known to the device and
// WiFi connected (see SetUpOnMainThread).
class UpdateRequiredScreenTest : public OobeBaseTest {
 public:
  UpdateRequiredScreenTest() { login_manager_mixin_.AppendRegularUsers(2); }
  ~UpdateRequiredScreenTest() override = default;

  UpdateRequiredScreenTest(const UpdateRequiredScreenTest&) = delete;
  UpdateRequiredScreenTest& operator=(const UpdateRequiredScreenTest&) = delete;

  // OobeBaseTest:
  void SetUpCommandLine(base::CommandLine* command_line) override {
    OobeBaseTest::SetUpCommandLine(command_line);
    // Start the Shill stub with one cellular and one wifi service only.
    command_line->AppendSwitchASCII(chromeos::switches::kShillStub,
                                    "clear=1, cellular=1, wifi=1");
  }

  void SetUpOnMainThread() override {
    OobeBaseTest::SetUpOnMainThread();
    // Set up fake networks.
    network_state_test_helper_ =
        std::make_unique<chromeos::NetworkStateTestHelper>(
            true /*use_default_devices_and_services*/);
    network_state_test_helper_->manager_test()->SetupDefaultEnvironment();
    // Fake networks have been set up. Connect to WiFi network.
    SetConnected(kWifiServicePath);
  }

  void TearDownOnMainThread() override {
    network_state_test_helper_.reset();
    OobeBaseTest::TearDownOnMainThread();
  }

  // Pushes `operation` as the fake update engine's current status and
  // notifies observers, driving the screen through the update flow.
  void SetUpdateEngineStatus(update_engine::Operation operation) {
    update_engine::StatusResult status;
    status.set_current_operation(operation);
    update_engine_client()->set_default_status(status);
    update_engine_client()->NotifyObserversThatStatusChanged(status);
  }

  // Sets the Shill connection state property of `service_path` (e.g.
  // shill::kStateIdle / shill::kStateOnline).
  void SetNetworkState(const std::string& service_path,
                       const std::string& state) {
    network_state_test_helper_->service_test()->SetServiceProperty(
        service_path, shill::kStateProperty, base::Value(state));
  }

  // Starts the wizard at the update-required screen and waits until it is the
  // visible OOBE screen.
  void ShowUpdateRequiredScreen() {
    LoginDisplayHost::default_host()->StartWizard(
        UpdateRequiredView::kScreenId);

    OobeScreenWaiter update_screen_waiter(UpdateRequiredView::kScreenId);
    update_screen_waiter.set_assert_next_screen();
    update_screen_waiter.Wait();

    test::OobeJS().ExpectVisiblePath(kUpdateRequiredScreen);
  }

  // Sets the DeviceMinimumVersionAueMessage policy to `eol_message` and waits
  // for the device settings update to propagate.
  void SetEolMessageAndWaitForSettingsChange(std::string eol_message) {
    policy::DevicePolicyBuilder* const device_policy(
        policy_helper_.device_policy());
    em::ChromeDeviceSettingsProto& proto(device_policy->payload());
    proto.mutable_device_minimum_version_aue_message()->set_value(eol_message);
    policy_helper_.RefreshPolicyAndWaitUntilDeviceSettingsUpdated(
        {kDeviceMinimumVersionAueMessage});
  }

 protected:
  // NOTE(review): never initialized or assigned in this fixture — do not
  // dereference without setting it first.
  UpdateRequiredScreen* update_required_screen_;
  // Error screen - owned by OobeUI.
  // Version updater - owned by `update_required_screen_`.
  VersionUpdater* version_updater_ = nullptr;
  // For testing captive portal
  NetworkPortalDetectorMixin network_portal_detector_{&mixin_host_};
  // Handles network connections
  std::unique_ptr<chromeos::NetworkStateTestHelper> network_state_test_helper_;
  policy::DevicePolicyCrosTestHelper policy_helper_;
  DeviceStateMixin device_state_mixin_{
      &mixin_host_, DeviceStateMixin::State::OOBE_COMPLETED_CLOUD_ENROLLED};
  LoginManagerMixin login_manager_mixin_{&mixin_host_};
};

// Verifies that a captive-portal network surfaces the error screen (after a
// short delay) and that going back online resumes the update flow.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestCaptivePortal) {
  ShowUpdateRequiredScreen();
  network_portal_detector_.SimulateDefaultNetworkState(
      NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_PORTAL);
  static_cast<UpdateRequiredScreen*>(
      WizardController::default_controller()->current_screen())
      ->SetErrorMessageDelayForTesting(base::Milliseconds(10));
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredStep);
  // Click update button to trigger the update process.
  test::OobeJS().ClickOnPath(kUpdateNowButton);
  // If the network is a captive portal network, error message is shown with a
  // delay.
  OobeScreenWaiter error_screen_waiter(ErrorScreenView::kScreenId);
  error_screen_waiter.set_assert_next_screen();
  error_screen_waiter.Wait();
  ErrorScreen* error_screen = GetOobeUI()->GetErrorScreen();
  EXPECT_EQ(UpdateRequiredView::kScreenId.AsId(),
            error_screen->GetParentScreen());
  test::OobeJS().ExpectVisible("error-message");
  test::OobeJS().ExpectVisiblePath(
      {"error-message", "captive-portal-message-text"});
  test::OobeJS().ExpectVisiblePath(
      {"error-message", "captive-portal-proxy-message-text"});
  // If network goes back online, the error screen should be hidden and update
  // process should start.
  network_portal_detector_.SimulateDefaultNetworkState(
      NetworkPortalDetector::CAPTIVE_PORTAL_STATUS_ONLINE);
  EXPECT_EQ(OobeScreen::SCREEN_UNKNOWN.AsId(), error_screen->GetParentScreen());
  SetUpdateEngineStatus(update_engine::Operation::CHECKING_FOR_UPDATE);
  SetUpdateEngineStatus(update_engine::Operation::UPDATE_AVAILABLE);
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredScreen);
  test::OobeJS().ExpectVisiblePath(kUpdateProcessStep);
}

// Verifies that a past EOL date shows the EOL dialog (with the delete-users
// message, since two users exist) instead of the update dialog.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestEolReached) {
  update_engine_client()->set_eol_date(
      base::DefaultClock::GetInstance()->Now() - base::Days(1));
  ShowUpdateRequiredScreen();

  test::OobeJS().ExpectVisiblePath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  test::OobeJS().ExpectVisiblePath(kEolDeleteUsersDataMessage);
  test::OobeJS().ExpectHiddenPath(kEolNoUsersDataMsg);
}

// Test to verify that clicking on the confirm button on the popup in case of
// update required and end-of-life reached, deletes all users on the device.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestEolDeleteUsersConfirm) {
  EXPECT_EQ(user_manager::UserManager::Get()->GetUsers().size(), 2u);
  update_engine_client()->set_eol_date(
      base::DefaultClock::GetInstance()->Now() - base::Days(1));
  ShowUpdateRequiredScreen();

  test::OobeJS().ExpectVisiblePath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectVisiblePath(kEolDeleteUsersDataMessage);
  test::OobeJS().TapOnPath(kEolDeleteUsersDataLink);
  WaitForConfirmationDialogToOpen();
  test::OobeJS().TapOnPath(kEolDeleteUsersDataConfirmButton);
  WaitForConfirmationDialogToClose();
  test::OobeJS().CreateVisibilityWaiter(true, kEolNoUsersDataMsg)->Wait();
  test::OobeJS().ExpectHiddenPath(kEolDeleteUsersDataMessage);
  EXPECT_EQ(user_manager::UserManager::Get()->GetUsers().size(), 0u);
}

// Test to verify that clicking on the cancel button on the popup in case of
// update required and end-of-life reached, does not delete any user.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestEolDeleteUsersCancel) {
  EXPECT_EQ(user_manager::UserManager::Get()->GetUsers().size(), 2u);
  update_engine_client()->set_eol_date(
      base::DefaultClock::GetInstance()->Now() - base::Days(1));
  ShowUpdateRequiredScreen();

  test::OobeJS().ExpectVisiblePath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectVisiblePath(kEolDeleteUsersDataMessage);
  test::OobeJS().TapOnPath(kEolDeleteUsersDataLink);
  WaitForConfirmationDialogToOpen();
  test::OobeJS().TapOnPath(kEolDeleteUsersDataCancelButton);
  WaitForConfirmationDialogToClose();
  // Cancelling leaves the delete-data offer visible and both users intact.
  test::OobeJS().ExpectVisiblePath(kEolDeleteUsersDataMessage);
  test::OobeJS().ExpectHiddenPath(kEolNoUsersDataMsg);
  EXPECT_EQ(user_manager::UserManager::Get()->GetUsers().size(), 2u);
}

// Verifies that an admin-set EOL message (DeviceMinimumVersionAueMessage) is
// rendered inside the EOL dialog.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestEolReachedAdminMessage) {
  update_engine_client()->set_eol_date(
      base::DefaultClock::GetInstance()->Now() - base::Days(1));
  SetEolMessageAndWaitForSettingsChange(kDemoEolMessage);
  ShowUpdateRequiredScreen();

  test::OobeJS().ExpectVisiblePath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectVisiblePath(kEolAdminMessageContainer);
  test::OobeJS().ExpectElementText(kDemoEolMessage, kEolAdminMessage);
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
}

// Verifies that a future EOL date keeps the normal update-required dialog.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestEolNotReached) {
  update_engine_client()->set_eol_date(
      base::DefaultClock::GetInstance()->Now() + base::Days(1));
  ShowUpdateRequiredScreen();

  test::OobeJS().ExpectHiddenPath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredStep);
}

// This tests the state of update required screen when the device is initially
// connected to a metered network and the user grants permission to update over
// it.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestUpdateOverMeteredNetwork) {
  // Disconnect Wifi network.
  SetNetworkState(kWifiServicePath, shill::kStateIdle);
  // Connect to cellular network and show update required screen.
  SetConnected(kCellularServicePath);
  ShowUpdateRequiredScreen();
  // Screen prompts user to either connect to a non-metered network or start
  // update over current metered network.
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  test::OobeJS().ExpectVisiblePath(kMeteredNetworkStep);
  // Click to start update over metered network.
  test::OobeJS().TapOnPath(kMeteredNetworkAcceptButton);
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateProcessStep)->Wait();
  // Expect screen to show progress of the update process.
  test::OobeJS().ExpectHiddenPath(kMeteredNetworkStep);
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  SetUpdateEngineStatus(update_engine::Operation::UPDATED_NEED_REBOOT);
  // UpdateStatusChanged(status) calls RebootAfterUpdate().
  EXPECT_EQ(1, update_engine_client()->reboot_after_update_call_count());
}

// This tests the state of update required screen when the device is initially
// not connected to any network and the user connects to Wifi to show update
// required screen.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestUpdateRequiredNoNetwork) {
  // Disconnect from all networks and show update required screen.
  network_state_test_helper_->service_test()->ClearServices();
  base::RunLoop().RunUntilIdle();
  ShowUpdateRequiredScreen();
  // Screen shows user to connect to a network to start update.
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  test::OobeJS().ExpectVisiblePath(kNoNetworkStep);
  // Connect to a WiFi network.
  network_state_test_helper_->service_test()->AddService(
      kWifiServicePath, kWifiServicePath, kWifiServicePath /* name */,
      shill::kTypeWifi, shill::kStateOnline, true);
  // Update required screen is shown when user moves from no network to a good
  // network.
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateRequiredStep)->Wait();
}

// This tests the condition when the user switches to a metered network during
// the update process. The user then grants the permission to continue the
// update.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest,
                       TestUpdateProcessNeedPermission) {
  // Wifi is connected, show update required screen.
  ShowUpdateRequiredScreen();
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredStep);
  // Click to start update process.
  test::OobeJS().ClickOnPath(kUpdateNowButton);
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateProcessStep)->Wait();
  // Expect screen to show progress of the update process.
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  test::OobeJS().ExpectVisiblePath(kUpdateProcessStep);
  // Network changed to a metered network and update engine requires permission
  // to continue.
  SetUpdateEngineStatus(update_engine::Operation::CHECKING_FOR_UPDATE);
  SetUpdateEngineStatus(update_engine::Operation::UPDATE_AVAILABLE);
  SetUpdateEngineStatus(update_engine::Operation::DOWNLOADING);
  SetUpdateEngineStatus(update_engine::Operation::NEED_PERMISSION_TO_UPDATE);
  test::OobeJS().CreateVisibilityWaiter(true, kMeteredNetworkStep)->Wait();
  test::OobeJS().ExpectHiddenPath(kUpdateProcessStep);
  // Screen prompts user to continue update on metered network. Click to
  // continue.
  test::OobeJS().TapOnPath(kMeteredNetworkAcceptButton);
  // Update process resumes.
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateProcessStep)->Wait();
  test::OobeJS().ExpectHiddenPath(kMeteredNetworkStep);
  SetUpdateEngineStatus(update_engine::Operation::UPDATED_NEED_REBOOT);
  // UpdateStatusChanged(status) calls RebootAfterUpdate().
  EXPECT_EQ(1, update_engine_client()->reboot_after_update_call_count());
}

// This tests the state of update required screen when the device is initially
// connected to a metered network and the update process starts automatically on
// switching to a non metered network.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest,
                       TestMeteredNetworkToGoodNetwork) {
  // Disconnect from Wifi and connect to cellular network.
  SetNetworkState(kWifiServicePath, shill::kStateIdle);
  SetConnected(kCellularServicePath);
  ShowUpdateRequiredScreen();
  // Screen prompts user to either connect to a non-metered network or start
  // update over current metered network.
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  test::OobeJS().ExpectVisiblePath(kMeteredNetworkStep);
  // Connect to a WiFi network and update starts automatically.
  SetNetworkState(kWifiServicePath, shill::kStateOnline);
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateProcessStep)->Wait();
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredScreen);
  test::OobeJS().ExpectHiddenPath(kMeteredNetworkStep);
  SetUpdateEngineStatus(update_engine::Operation::CHECKING_FOR_UPDATE);
  SetUpdateEngineStatus(update_engine::Operation::UPDATE_AVAILABLE);
  SetUpdateEngineStatus(update_engine::Operation::DOWNLOADING);
  SetUpdateEngineStatus(update_engine::Operation::UPDATED_NEED_REBOOT);
  // UpdateStatusChanged(status) calls RebootAfterUpdate().
  EXPECT_EQ(1, update_engine_client()->reboot_after_update_call_count());
}

// This tests the update process initiated from update required screen.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestUpdateProcess) {
  // Wifi is connected, show update required screen.
  ShowUpdateRequiredScreen();
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredStep);
  // Click to start update process.
  test::OobeJS().ClickOnPath(kUpdateNowButton);
  test::OobeJS().CreateVisibilityWaiter(true, kUpdateProcessStep)->Wait();
  test::OobeJS().ExpectHiddenPath(kUpdateRequiredStep);
  SetUpdateEngineStatus(update_engine::Operation::CHECKING_FOR_UPDATE);
  // Wait for the content of the dialog to be rendered.
  test::OobeJS()
      .CreateDisplayedWaiter(true, kCheckingForUpdatesMessage)
      ->Wait();
  test::OobeJS().ExpectVisiblePath(kUpdateProcessCheckingStep);
  test::OobeJS().ExpectHiddenPath(kUpdateProcessUpdatingStep);
  test::OobeJS().ExpectHiddenPath(kUpdateProcessCompleteStep);
  SetUpdateEngineStatus(update_engine::Operation::DOWNLOADING);
  // Wait for the content of the dialog to be rendered.
  test::OobeJS().CreateDisplayedWaiter(true, kUpdatingProgress)->Wait();
  test::OobeJS().ExpectHiddenPath(kUpdateProcessCheckingStep);
  SetUpdateEngineStatus(update_engine::Operation::UPDATED_NEED_REBOOT);
  test::OobeJS()
      .CreateVisibilityWaiter(true, kUpdateProcessCompleteStep)
      ->Wait();
  test::OobeJS().ExpectHiddenPath(kUpdateProcessUpdatingStep);
  // UpdateStatusChanged(status) calls RebootAfterUpdate().
  EXPECT_EQ(1, update_engine_client()->reboot_after_update_call_count());
}

// Validates that the manager presented to the user in the subtitle is the
// domain to which the device belongs.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenTest, TestSubtitle) {
  ShowUpdateRequiredScreen();
  test::OobeJS().ExpectElementText(
      l10n_util::GetStringFUTF8(IDS_UPDATE_REQUIRED_SCREEN_MESSAGE, kDomain16,
                                ui::GetChromeOSDeviceName()),
      kUpdateRequiredSubtitle);
}

// Fixture variant where the device policy carries an explicit `managed_by`
// (FlexOrg) manager instead of only an enrollment domain.
class UpdateRequiredScreenFlexOrgTest : public UpdateRequiredScreenTest {
 public:
  UpdateRequiredScreenFlexOrgTest() {}
  ~UpdateRequiredScreenFlexOrgTest() override = default;

  void SetUpInProcessBrowserTestFixture() override {
    UpdateRequiredScreenTest::SetUpInProcessBrowserTestFixture();
    policy_helper_.device_policy()->policy_data().set_managed_by(kManager);
    policy_helper_.RefreshDevicePolicy();
  }

 protected:
  // NOTE(review): shadows UpdateRequiredScreenTest::policy_helper_ — the two
  // fixtures operate on separate helper instances.
  policy::DevicePolicyCrosTestHelper policy_helper_;
};

// For FlexOrgs, validates that the manager presented to the user in the
// subtitle is the manager of the FlexOrg.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenFlexOrgTest, TestSubtitle) {
  ShowUpdateRequiredScreen();
  // The subtitle names the FlexOrg manager (kManager16) rather than the
  // enrollment domain.
  test::OobeJS().ExpectElementText(
      l10n_util::GetStringFUTF8(IDS_UPDATE_REQUIRED_SCREEN_MESSAGE, kManager16,
                                ui::GetChromeOSDeviceName()),
      kUpdateRequiredSubtitle);
}

// Fixture that pre-sets the DeviceMinimumVersion policy (requiring an
// impossible version) and an already-passed EOL date before the browser
// starts, so the update-required screen appears with no users on the device.
class UpdateRequiredScreenPolicyPresentTest : public OobeBaseTest {
 public:
  UpdateRequiredScreenPolicyPresentTest() {}
  ~UpdateRequiredScreenPolicyPresentTest() override = default;

  void SetUpInProcessBrowserTestFixture() override {
    OobeBaseTest::SetUpInProcessBrowserTestFixture();
    // Create and set policy value.
    SetAndRefreshMinimumChromeVersionPolicy(
        policy::CreateMinimumVersionSingleRequirementPolicyValue(
            "1111.2.3.4" /* version */, 0 /* warning */, 0 /* eol_warning */,
            false /* unmanaged_user_restricted */));
    // Simulate end-of-life reached.
    update_engine_client()->set_eol_date(
        base::DefaultClock::GetInstance()->Now() - base::Days(1));
  }

  // Serializes `value` to JSON and stores it in the DeviceMinimumVersion
  // policy field (does not refresh device policy).
  void SetMinimumChromeVersionPolicy(const base::Value& value) {
    policy::DevicePolicyBuilder* const device_policy(
        policy_helper_.device_policy());
    em::ChromeDeviceSettingsProto& proto(device_policy->payload());
    std::string policy_value;
    EXPECT_TRUE(base::JSONWriter::Write(value, &policy_value));
    proto.mutable_device_minimum_version()->set_value(policy_value);
  }

  void SetAndRefreshMinimumChromeVersionPolicy(const base::Value& value) {
    SetMinimumChromeVersionPolicy(value);
    policy_helper_.RefreshDevicePolicy();
  }

 protected:
  DeviceStateMixin device_state_mixin_{
      &mixin_host_, DeviceStateMixin::State::OOBE_COMPLETED_CLOUD_ENROLLED};
  policy::DevicePolicyCrosTestHelper policy_helper_;
};

// Test to verify that reboot after deleting all users data from the device
// still shows the update required screen to block user sign in.
IN_PROC_BROWSER_TEST_F(UpdateRequiredScreenPolicyPresentTest,
                       TestUpdateRequiredScreen) {
  EXPECT_EQ(user_manager::UserManager::Get()->GetUsers().size(), 0u);
  OobeScreenWaiter update_screen_waiter(UpdateRequiredView::kScreenId);
  update_screen_waiter.set_assert_next_screen();
  update_screen_waiter.Wait();
  // With no users present the EOL dialog shows the "no users data" variant.
  test::OobeJS().ExpectVisiblePath(kUpdateRequiredEolDialog);
  test::OobeJS().ExpectVisiblePath(kEolNoUsersDataMsg);
  test::OobeJS().ExpectHiddenPath(kEolDeleteUsersDataMessage);
}

}  // namespace
}  // namespace ash
8,745
13,648
# Driver for Mboot, the MicroPython boot loader
# MIT license; Copyright (c) 2018 <NAME>

import struct, time, os, hashlib

# Command opcodes of the Mboot I2C protocol. Each transaction writes a
# command byte (plus arguments) to the target and then polls for a
# length-prefixed response.
I2C_CMD_ECHO = 1
I2C_CMD_GETID = 2
I2C_CMD_GETCAPS = 3
I2C_CMD_RESET = 4
I2C_CMD_CONFIG = 5
I2C_CMD_GETLAYOUT = 6
I2C_CMD_MASSERASE = 7
I2C_CMD_PAGEERASE = 8
I2C_CMD_SETRDADDR = 9
I2C_CMD_SETWRADDR = 10
I2C_CMD_READ = 11
I2C_CMD_WRITE = 12
I2C_CMD_COPY = 13
I2C_CMD_CALCHASH = 14
I2C_CMD_MARKVALID = 15


class Bootloader:
    """Host-side driver that talks to an Mboot target over I2C.

    Requires a MicroPython ``machine.I2C``-compatible object; uses
    ``time.ticks_ms``/``ticks_diff``/``sleep_us``, so it runs on a
    MicroPython host board, not CPython.
    """

    def __init__(self, i2c, addr):
        # i2c: machine.I2C-compatible bus; addr: 7-bit target address.
        self.i2c = i2c
        self.addr = addr
        self.buf1 = bytearray(1)  # reusable 1-byte buffer for length reads
        try:
            # Zero-length probe write: raises OSError (NACK) if no device.
            self.i2c.writeto(addr, b"")
        except OSError:
            raise Exception("no I2C mboot device found")

    def wait_response(self):
        """Poll the target for a response; return its payload as bytes.

        The first byte is a length/status: >= 129 signals an error (raised
        as Exception(n)), 0 means an empty payload, otherwise n payload
        bytes follow. Retries every 500us for up to 5 seconds while the
        target NACKs (OSError).
        """
        start = time.ticks_ms()
        while 1:
            try:
                self.i2c.readfrom_into(self.addr, self.buf1)
                n = self.buf1[0]
                break
            except OSError as er:
                # Target not ready yet; back off briefly before retrying.
                time.sleep_us(500)
            if time.ticks_diff(time.ticks_ms(), start) > 5000:
                raise Exception("timeout")
        if n >= 129:
            # Error status from the target; the code is the raw byte value.
            raise Exception(n)
        if n == 0:
            return b""
        else:
            return self.i2c.readfrom(self.addr, n)

    def wait_empty_response(self):
        # Like wait_response() but asserts the command returns no payload.
        ret = self.wait_response()
        if ret:
            raise Exception("expected empty response got %r" % ret)
        else:
            return None

    def echo(self, data):
        # Round-trip `data` through the target; useful as a link test.
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_ECHO) + data)
        return self.wait_response()

    def getid(self):
        """Return (unique_id bytes, mcu_name str, board_name str)."""
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_GETID))
        ret = self.wait_response()
        unique_id = ret[:12]
        # Remainder is two NUL-separated ASCII strings.
        mcu_name, board_name = ret[12:].split(b"\x00")
        return unique_id, str(mcu_name, "ascii"), str(board_name, "ascii")

    def reset(self):
        # Reboot the target out of the bootloader.
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_RESET))
        # we don't expect any response

    def getlayout(self):
        """Parse the flash layout string into a list of (addr, size) pages.

        The target reports e.g. "@Internal Flash /0x08000000/4*016Kg,..."
        (DFU-style): sectors as count*sizeKg groups, expanded here to one
        entry per page.
        """
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_GETLAYOUT))
        layout = self.wait_response()
        id, flash_addr, layout = layout.split(b"/")
        assert id == b"@Internal Flash "
        flash_addr = int(flash_addr, 16)
        pages = []
        for chunk in layout.split(b","):
            n, sz = chunk.split(b"*")
            n = int(n)
            assert sz.endswith(b"Kg")
            sz = int(sz[:-2]) * 1024
            for i in range(n):
                pages.append((flash_addr, sz))
                flash_addr += sz
        return pages

    def pageerase(self, addr):
        # Erase the flash page containing `addr`.
        self.i2c.writeto(self.addr, struct.pack("<BI", I2C_CMD_PAGEERASE, addr))
        self.wait_empty_response()

    def setrdaddr(self, addr):
        # Set the target's auto-incrementing read pointer.
        self.i2c.writeto(self.addr, struct.pack("<BI", I2C_CMD_SETRDADDR, addr))
        self.wait_empty_response()

    def setwraddr(self, addr):
        # Set the target's auto-incrementing write pointer.
        self.i2c.writeto(self.addr, struct.pack("<BI", I2C_CMD_SETWRADDR, addr))
        self.wait_empty_response()

    def read(self, n):
        # Read n bytes from the current read address.
        self.i2c.writeto(self.addr, struct.pack("<BB", I2C_CMD_READ, n))
        return self.wait_response()

    def write(self, buf):
        # Write `buf` at the current write address.
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_WRITE) + buf)
        self.wait_empty_response()

    def calchash(self, n):
        # Ask the target for the SHA256 of n bytes from the read address.
        self.i2c.writeto(self.addr, struct.pack("<BI", I2C_CMD_CALCHASH, n))
        return self.wait_response()

    def markvalid(self):
        # Mark the freshly written firmware image as valid/bootable.
        self.i2c.writeto(self.addr, struct.pack("<B", I2C_CMD_MARKVALID))
        self.wait_empty_response()

    def deployfile(self, filename, addr):
        """Flash `filename` to the target starting at `addr`, verify, reset.

        Erases pages lazily as the write pointer enters them, streams the
        file in 128-byte chunks, then compares a locally computed SHA256
        (with the image's validity bits forced set in the first word)
        against the target's hash before marking the firmware valid.
        """
        pages = self.getlayout()
        page_erased = [False] * len(pages)
        buf = bytearray(128)  # maximum payload supported by I2C protocol
        start_addr = addr
        self.setwraddr(addr)
        fsize = os.stat(filename)[6]
        local_sha = hashlib.sha256()
        print("Deploying %s to location 0x%08x" % (filename, addr))
        with open(filename, "rb") as f:
            t0 = time.ticks_ms()
            while True:
                n = f.readinto(buf)
                if n == 0:
                    break
                # check if we need to erase the page
                for i, p in enumerate(pages):
                    if p[0] <= addr < p[0] + p[1]:
                        # found page
                        if not page_erased[i]:
                            print(
                                "\r% 3u%% erase 0x%08x"
                                % (100 * (addr - start_addr) // fsize, addr),
                                end="",
                            )
                            self.pageerase(addr)
                            page_erased[i] = True
                        break
                else:
                    raise Exception("address 0x%08x not valid" % addr)

                # write the data
                # NOTE(review): always sends the full 128-byte buffer even on
                # a short final read; the tail beyond n is stale but the hash
                # below only covers the first n bytes — confirm intended.
                self.write(buf)

                # update local SHA256, with validity bits set
                if addr == start_addr:
                    buf[0] |= 3
                if n == len(buf):
                    local_sha.update(buf)
                else:
                    local_sha.update(buf[:n])

                addr += n
                ntotal = addr - start_addr
                if ntotal % 2048 == 0 or ntotal == fsize:
                    print(
                        "\r% 3u%% % 7u bytes " % (100 * ntotal // fsize, ntotal),
                        end="",
                    )
            t1 = time.ticks_ms()
        print()
        # NOTE(review): the 1024 and 1000 factors look swapped for a KiB/sec
        # figure (reported rate is ~5% high) — confirm against upstream.
        print("rate: %.2f KiB/sec" % (1024 * ntotal / (t1 - t0) / 1000))

        local_sha = local_sha.digest()
        print("Local SHA256: ", "".join("%02x" % x for x in local_sha))

        # Verify the target's view of the written image.
        self.setrdaddr(start_addr)
        remote_sha = self.calchash(ntotal)
        print("Remote SHA256:", "".join("%02x" % x for x in remote_sha))

        if local_sha == remote_sha:
            print("Marking app firmware as valid")
            self.markvalid()

        self.reset()
3,260
1,875
<filename>core/src/main/java/org/teavm/debugging/Debugger.java /* * Copyright 2014 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teavm.debugging; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.teavm.common.Promise; import org.teavm.debugging.information.DebugInformation; import org.teavm.debugging.information.DebugInformationProvider; import org.teavm.debugging.information.DebuggerCallSite; import org.teavm.debugging.information.DebuggerCallSiteVisitor; import org.teavm.debugging.information.DebuggerStaticCallSite; import org.teavm.debugging.information.DebuggerVirtualCallSite; import org.teavm.debugging.information.GeneratedLocation; import org.teavm.debugging.information.SourceLocation; import org.teavm.debugging.javascript.JavaScriptBreakpoint; import org.teavm.debugging.javascript.JavaScriptCallFrame; import org.teavm.debugging.javascript.JavaScriptDebugger; import org.teavm.debugging.javascript.JavaScriptDebuggerListener; import org.teavm.debugging.javascript.JavaScriptLocation; import org.teavm.debugging.javascript.JavaScriptVariable; import org.teavm.model.MethodReference; public class Debugger { private Set<DebuggerListener> listeners = new LinkedHashSet<>(); private JavaScriptDebugger javaScriptDebugger; private DebugInformationProvider 
debugInformationProvider; private List<JavaScriptBreakpoint> temporaryBreakpoints = new ArrayList<>(); private Map<String, DebugInformation> debugInformationMap = new HashMap<>(); private Map<String, Set<DebugInformation>> debugInformationFileMap = new HashMap<>(); private Map<DebugInformation, String> scriptMap = new HashMap<>(); private Map<JavaScriptBreakpoint, Breakpoint> breakpointMap = new HashMap<>(); private Set<Breakpoint> breakpoints = new LinkedHashSet<>(); private Set<? extends Breakpoint> readonlyBreakpoints = Collections.unmodifiableSet(breakpoints); private CallFrame[] callStack; private Set<String> scriptNames = new LinkedHashSet<>(); public Debugger(JavaScriptDebugger javaScriptDebugger, DebugInformationProvider debugInformationProvider) { this.javaScriptDebugger = javaScriptDebugger; this.debugInformationProvider = debugInformationProvider; javaScriptDebugger.addListener(javaScriptListener); } public JavaScriptDebugger getJavaScriptDebugger() { return javaScriptDebugger; } public void addListener(DebuggerListener listener) { listeners.add(listener); } public void removeListener(DebuggerListener listener) { listeners.remove(listener); } public Promise<Void> suspend() { return javaScriptDebugger.suspend(); } public Promise<Void> resume() { return javaScriptDebugger.resume(); } public Promise<Void> stepInto() { return step(true); } public Promise<Void> stepOut() { return javaScriptDebugger.stepOut(); } public Promise<Void> stepOver() { return step(false); } private Promise<Void> jsStep(boolean enterMethod) { return enterMethod ? 
javaScriptDebugger.stepInto() : javaScriptDebugger.stepOver(); } private Promise<Void> step(boolean enterMethod) { CallFrame[] callStack = getCallStack(); if (callStack == null || callStack.length == 0) { return jsStep(enterMethod); } CallFrame recentFrame = callStack[0]; if (recentFrame.getLocation() == null || recentFrame.getLocation().getFileName() == null || recentFrame.getLocation().getLine() < 0) { return jsStep(enterMethod); } Set<JavaScriptLocation> successors = new HashSet<>(); boolean first = true; for (CallFrame frame : callStack) { boolean exits; String script = frame.getOriginalLocation().getScript(); DebugInformation debugInfo = debugInformationMap.get(script); if (frame.getLocation() != null && frame.getLocation().getFileName() != null && frame.getLocation().getLine() >= 0 && debugInfo != null) { exits = addFollowing(debugInfo, frame.getLocation(), script, new HashSet<>(), successors); if (enterMethod) { CallSiteSuccessorFinder successorFinder = new CallSiteSuccessorFinder(debugInfo, script, successors); DebuggerCallSite[] callSites = debugInfo.getCallSites(frame.getLocation()); for (DebuggerCallSite callSite : callSites) { callSite.acceptVisitor(successorFinder); } } } else { exits = true; } if (!exits) { break; } enterMethod = false; if (!first && frame.getLocation() != null) { for (GeneratedLocation location : debugInfo.getGeneratedLocations(frame.getLocation())) { successors.add(new JavaScriptLocation(script, location.getLine(), location.getColumn())); } } first = false; } List<Promise<Void>> jsBreakpointPromises = new ArrayList<>(); for (JavaScriptLocation successor : successors) { jsBreakpointPromises.add(javaScriptDebugger.createBreakpoint(successor) .thenVoid(temporaryBreakpoints::add)); } return Promise.allVoid(jsBreakpointPromises).thenAsync(v -> javaScriptDebugger.resume()); } static class CallSiteSuccessorFinder implements DebuggerCallSiteVisitor { private DebugInformation debugInfo; private String script; Set<JavaScriptLocation> 
locations; CallSiteSuccessorFinder(DebugInformation debugInfo, String script, Set<JavaScriptLocation> locations) { this.debugInfo = debugInfo; this.script = script; this.locations = locations; } @Override public void visit(DebuggerVirtualCallSite callSite) { for (MethodReference potentialMethod : debugInfo.getOverridingMethods(callSite.getMethod())) { for (GeneratedLocation loc : debugInfo.getMethodEntrances(potentialMethod)) { loc = debugInfo.getStatementLocation(loc); locations.add(new JavaScriptLocation(script, loc.getLine(), loc.getColumn())); } } } @Override public void visit(DebuggerStaticCallSite callSite) { for (GeneratedLocation loc : debugInfo.getMethodEntrances(callSite.getMethod())) { loc = debugInfo.getStatementLocation(loc); locations.add(new JavaScriptLocation(script, loc.getLine(), loc.getColumn())); } } } private boolean addFollowing(DebugInformation debugInfo, SourceLocation location, String script, Set<SourceLocation> visited, Set<JavaScriptLocation> successors) { if (!visited.add(location)) { return false; } SourceLocation[] following = debugInfo.getFollowingLines(location); boolean exits = false; if (following != null) { for (SourceLocation successor : following) { if (successor == null) { exits = true; } else { Collection<GeneratedLocation> genLocations = debugInfo.getGeneratedLocations(successor); if (!genLocations.isEmpty()) { for (GeneratedLocation loc : genLocations) { loc = debugInfo.getStatementLocation(loc); successors.add(new JavaScriptLocation(script, loc.getLine(), loc.getColumn())); } } else { exits |= addFollowing(debugInfo, successor, script, visited, successors); } } } } return exits; } private List<DebugInformation> debugInformationBySource(String sourceFile) { Set<DebugInformation> list = debugInformationFileMap.get(sourceFile); return list != null ? 
new ArrayList<>(list) : Collections.emptyList(); } public Promise<Void> continueToLocation(SourceLocation location) { return continueToLocation(location.getFileName(), location.getLine()); } public Promise<Void> continueToLocation(String fileName, int line) { if (!javaScriptDebugger.isSuspended()) { return Promise.VOID; } List<Promise<Void>> promises = new ArrayList<>(); for (DebugInformation debugInformation : debugInformationBySource(fileName)) { Collection<GeneratedLocation> locations = debugInformation.getGeneratedLocations(fileName, line); for (GeneratedLocation location : locations) { JavaScriptLocation jsLocation = new JavaScriptLocation(scriptMap.get(debugInformation), location.getLine(), location.getColumn()); promises.add(javaScriptDebugger.createBreakpoint(jsLocation).thenVoid(temporaryBreakpoints::add)); } } return Promise.allVoid(promises).thenAsync(v -> javaScriptDebugger.resume()); } public boolean isSuspended() { return javaScriptDebugger.isSuspended(); } public Promise<Breakpoint> createBreakpoint(String file, int line) { return createBreakpoint(new SourceLocation(file, line)); } public Collection<? extends String> getSourceFiles() { return debugInformationFileMap.keySet(); } public Promise<Breakpoint> createBreakpoint(SourceLocation location) { Breakpoint breakpoint = new Breakpoint(this, location); breakpoints.add(breakpoint); return updateInternalBreakpoints(breakpoint).then(v -> { updateBreakpointStatus(breakpoint, false); return breakpoint; }); } public Set<? 
extends Breakpoint> getBreakpoints() { return readonlyBreakpoints; } private Promise<Void> updateInternalBreakpoints(Breakpoint breakpoint) { if (breakpoint.isDestroyed()) { return Promise.VOID; } List<Promise<Void>> promises = new ArrayList<>(); for (JavaScriptBreakpoint jsBreakpoint : breakpoint.jsBreakpoints) { breakpointMap.remove(jsBreakpoint); promises.add(jsBreakpoint.destroy()); } List<JavaScriptBreakpoint> jsBreakpoints = new ArrayList<>(); SourceLocation location = breakpoint.getLocation(); for (DebugInformation debugInformation : debugInformationBySource(location.getFileName())) { Collection<GeneratedLocation> locations = debugInformation.getGeneratedLocations(location); for (GeneratedLocation genLocation : locations) { JavaScriptLocation jsLocation = new JavaScriptLocation(scriptMap.get(debugInformation), genLocation.getLine(), genLocation.getColumn()); promises.add(javaScriptDebugger.createBreakpoint(jsLocation).thenVoid(jsBreakpoint -> { jsBreakpoints.add(jsBreakpoint); breakpointMap.put(jsBreakpoint, breakpoint); })); } } breakpoint.jsBreakpoints = jsBreakpoints; return Promise.allVoid(promises); } private DebuggerListener[] getListeners() { return listeners.toArray(new DebuggerListener[0]); } private void updateBreakpointStatus(Breakpoint breakpoint, boolean fireEvent) { boolean valid = false; for (JavaScriptBreakpoint jsBreakpoint : breakpoint.jsBreakpoints) { if (jsBreakpoint.isValid()) { valid = true; } } if (breakpoint.valid != valid) { breakpoint.valid = valid; if (fireEvent) { for (DebuggerListener listener : getListeners()) { listener.breakpointStatusChanged(breakpoint); } } } } public CallFrame[] getCallStack() { if (!isSuspended()) { return null; } if (callStack == null) { // TODO: with inlining enabled we can have several JVM methods compiled into one JavaScript function // so we must consider this case. 
List<CallFrame> frames = new ArrayList<>(); boolean wasEmpty = false; for (JavaScriptCallFrame jsFrame : javaScriptDebugger.getCallStack()) { DebugInformation debugInformation = debugInformationMap.get(jsFrame.getLocation().getScript()); SourceLocation loc; if (debugInformation != null) { loc = debugInformation.getSourceLocation(jsFrame.getLocation().getLine(), jsFrame.getLocation().getColumn()); } else { loc = null; } boolean empty = loc == null || (loc.getFileName() == null && loc.getLine() < 0); MethodReference method = !empty && debugInformation != null ? debugInformation.getMethodAt(jsFrame.getLocation().getLine(), jsFrame.getLocation().getColumn()) : null; if (!empty || !wasEmpty) { frames.add(new CallFrame(this, jsFrame, loc, method, debugInformation)); } wasEmpty = empty; } callStack = frames.toArray(new CallFrame[0]); } return callStack.clone(); } Promise<Map<String, Variable>> createVariables(JavaScriptCallFrame jsFrame, DebugInformation debugInformation) { return jsFrame.getVariables().then(jsVariables -> { Map<String, Variable> vars = new HashMap<>(); for (Map.Entry<String, ? 
extends JavaScriptVariable> entry : jsVariables.entrySet()) { JavaScriptVariable jsVar = entry.getValue(); String[] names = mapVariable(entry.getKey(), jsFrame.getLocation()); Value value = new Value(this, debugInformation, jsVar.getValue()); for (String name : names) { if (name == null) { name = "js:" + jsVar.getName(); } vars.put(name, new Variable(name, value)); } } return Collections.unmodifiableMap(vars); }); } private void addScript(String name) { if (!name.isEmpty()) { scriptNames.add(name); } if (debugInformationMap.containsKey(name)) { updateBreakpoints(); return; } DebugInformation debugInfo = debugInformationProvider.getDebugInformation(name); if (debugInfo == null) { return; } debugInformationMap.put(name, debugInfo); for (String sourceFile : debugInfo.getFilesNames()) { Set<DebugInformation> list = debugInformationFileMap.get(sourceFile); if (list == null) { list = new HashSet<>(); debugInformationFileMap.put(sourceFile, list); } list.add(debugInfo); } scriptMap.put(debugInfo, name); updateBreakpoints(); } public Set<? 
extends String> getScriptNames() { return scriptNames; } private void updateBreakpoints() { for (Breakpoint breakpoint : breakpoints) { updateInternalBreakpoints(breakpoint).thenVoid(v -> updateBreakpointStatus(breakpoint, true)); } } public boolean isAttached() { return javaScriptDebugger.isAttached(); } public void detach() { javaScriptDebugger.detach(); } void destroyBreakpoint(Breakpoint breakpoint) { for (JavaScriptBreakpoint jsBreakpoint : breakpoint.jsBreakpoints) { jsBreakpoint.destroy(); breakpointMap.remove(jsBreakpoint); } breakpoint.jsBreakpoints = new ArrayList<>(); breakpoints.remove(breakpoint); } private void fireResumed() { for (DebuggerListener listener : getListeners()) { listener.resumed(); } } private void firePaused(JavaScriptBreakpoint breakpoint) { List<JavaScriptBreakpoint> temporaryBreakpoints = new ArrayList<>(this.temporaryBreakpoints); this.temporaryBreakpoints.clear(); List<Promise<Void>> promises = new ArrayList<>(); for (JavaScriptBreakpoint jsBreakpoint : temporaryBreakpoints) { promises.add(jsBreakpoint.destroy()); } callStack = null; Promise.allVoid(promises).thenVoid(v -> { Breakpoint javaBreakpoint = null; if (breakpoint != null && !temporaryBreakpoints.contains(breakpoint)) { javaBreakpoint = breakpointMap.get(breakpoint); } for (DebuggerListener listener : getListeners()) { listener.paused(javaBreakpoint); } }); } private void fireAttached() { for (Breakpoint breakpoint : breakpoints) { updateInternalBreakpoints(breakpoint).thenVoid(v -> updateBreakpointStatus(breakpoint, false)); } for (DebuggerListener listener : getListeners()) { listener.attached(); } } private void fireDetached() { for (Breakpoint breakpoint : breakpoints) { updateBreakpointStatus(breakpoint, false); } for (DebuggerListener listener : getListeners()) { listener.detached(); } } private void fireBreakpointChanged(JavaScriptBreakpoint jsBreakpoint) { Breakpoint breakpoint = breakpointMap.get(jsBreakpoint); if (breakpoint != null) { 
updateBreakpointStatus(breakpoint, true); } } String[] mapVariable(String variable, JavaScriptLocation location) { DebugInformation debugInfo = debugInformationMap.get(location.getScript()); if (debugInfo == null) { return new String[0]; } return debugInfo.getVariableMeaningAt(location.getLine(), location.getColumn(), variable); } String mapField(String className, String jsField) { for (DebugInformation debugInfo : debugInformationMap.values()) { String meaning = debugInfo.getFieldMeaning(className, jsField); if (meaning != null) { return meaning; } } return null; } private JavaScriptDebuggerListener javaScriptListener = new JavaScriptDebuggerListener() { @Override public void resumed() { fireResumed(); } @Override public void paused(JavaScriptBreakpoint breakpoint) { firePaused(breakpoint); } @Override public void scriptAdded(String name) { addScript(name); } @Override public void attached() { fireAttached(); } @Override public void detached() { fireDetached(); } @Override public void breakpointChanged(JavaScriptBreakpoint jsBreakpoint) { fireBreakpointChanged(jsBreakpoint); } }; }
8,293
377
/******************************************************************************* * * Copyright 2012 Impetus Infotech. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. ******************************************************************************/ package com.impetus.kundera.client.cassandra.pelops.crud; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import junit.framework.Assert; import org.apache.cassandra.auth.PasswordAuthenticator; import org.apache.cassandra.thrift.AuthenticationException; import org.apache.cassandra.thrift.AuthenticationRequest; import org.apache.cassandra.thrift.AuthorizationException; import org.apache.cassandra.thrift.CfDef; import org.apache.cassandra.thrift.ColumnDef; import org.apache.cassandra.thrift.IndexType; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.thrift.KsDef; import org.apache.cassandra.thrift.NotFoundException; import org.apache.cassandra.thrift.SchemaDisagreementException; import org.apache.cassandra.thrift.TimedOutException; import org.apache.cassandra.thrift.UnavailableException; import org.apache.thrift.TException; import org.junit.After; import org.junit.Before; import org.junit.Test; import 
com.impetus.kundera.client.cassandra.persistence.CassandraCli; /** * Cassandra authentication test. * * @author vivek.mishra */ public class CassandraAuthenticationTest /*extends BaseTest*/ { private String userName; private String password; /** * Sets the up. * * @throws Exception * the exception */ @Before public void setUp() throws Exception { // userName = "kunderauser"; // password = "<PASSWORD>"; // System.setProperty("passwd.properties", // "../resources/passwd.properties"); // System.setProperty("access.properties", // "../resources/access.properties"); // // CassandraCli.cassandraSetUp(); } @Test public void testDummy() { // do nothing. // please do not modify this test at all! } /** * Authenticate with valid credentials. */ // @Test public void authenticateWithValidCredentials() { try { EntityManagerFactory emf = Persistence.createEntityManagerFactory("authenticationTest"); Assert.assertNotNull(emf); loadData(); EntityManager em = emf.createEntityManager(); Assert.assertNotNull(em); PersonAuth o = new PersonAuth(); o.setPersonId("1"); o.setPersonName("vivek"); o.setAge(10); em.persist(o); PersonAuth p = em.find(PersonAuth.class, "1"); Assert.assertNotNull(p); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Authenticate with invalid credentials. 
* * @throws SchemaDisagreementException * @throws TimedOutException * @throws UnavailableException * @throws InvalidRequestException * @throws TException * @throws IOException */ // @Test public void authenticateWithInValidCredentials() throws IOException, TException, InvalidRequestException, UnavailableException, TimedOutException, SchemaDisagreementException { EntityManagerFactory emf = null; EntityManager em = null; try { userName = "kunderauser"; password = "<PASSWORD>"; loadData(); emf = Persistence.createEntityManagerFactory("invalidauthenticationTest"); em = emf.createEntityManager(); Assert.fail("Shouldn't be called"); } catch (AuthenticationException e) { Assert.assertNull(emf); Assert.assertNull(em); userName = "kunderauser"; password = "<PASSWORD>"; } catch (AuthorizationException e) { Assert.assertNull(emf); Assert.assertNull(em); userName = "kunderauser"; password = "<PASSWORD>"; } } /** * No authentication test. * */ // @Test public void noAuthenticationTest() { try { EntityManagerFactory emf = Persistence.createEntityManagerFactory("cass_pu"); Assert.assertNotNull(emf); EntityManager em = emf.createEntityManager(); Assert.assertNotNull(em); } catch (Exception e) { Assert.fail(e.getMessage()); } } /** * Load cassandra specific data. 
* * @throws TException * the t exception * @throws InvalidRequestException * the invalid request exception * @throws UnavailableException * the unavailable exception * @throws TimedOutException * the timed out exception * @throws SchemaDisagreementException * the schema disagreement exception * @throws AuthorizationException * @throws AuthenticationException */ private void loadData() throws TException, InvalidRequestException, UnavailableException, TimedOutException, SchemaDisagreementException, AuthenticationException, AuthorizationException { KsDef ksDef = null; CfDef user_Def = new CfDef(); user_Def.name = "PERSON"; user_Def.keyspace = "KunderaAuthentication"; user_Def.setComparator_type("UTF8Type"); user_Def.setDefault_validation_class("UTF8Type"); ColumnDef columnDef = new ColumnDef(ByteBuffer.wrap("PERSON_NAME".getBytes()), "UTF8Type"); columnDef.index_type = IndexType.KEYS; user_Def.addToColumn_metadata(columnDef); ColumnDef columnDef1 = new ColumnDef(ByteBuffer.wrap("AGE".getBytes()), "UTF8Type"); columnDef1.index_type = IndexType.KEYS; user_Def.addToColumn_metadata(columnDef1); List<CfDef> cfDefs = new ArrayList<CfDef>(); cfDefs.add(user_Def); try { PasswordAuthenticator idAuth = new PasswordAuthenticator(); Map<String, String> credentials = new HashMap<String, String>(); credentials.put(idAuth.USERNAME_KEY, userName); credentials.put(idAuth.PASSWORD_KEY, password); CassandraCli.client.login(new AuthenticationRequest(credentials)); CassandraCli.createKeySpace("KunderaAuthentication"); ksDef = CassandraCli.client.describe_keyspace("KunderaAuthentication"); CassandraCli.client.set_keyspace("KunderaAuthentication"); List<CfDef> cfDefn = ksDef.getCf_defs(); for (CfDef cfDef1 : cfDefn) { if (cfDef1.getName().equalsIgnoreCase("PERSON")) { CassandraCli.client.system_drop_column_family("PERSON"); } } CassandraCli.client.system_add_column_family(user_Def); } catch (NotFoundException e) { ksDef = new KsDef("KunderaAuthentication", 
"org.apache.cassandra.locator.SimpleStrategy", cfDefs); ksDef.setReplication_factor(1); CassandraCli.client.system_add_keyspace(ksDef); } CassandraCli.client.set_keyspace("KunderaAuthentication"); } /** * Tear down. * * @throws TException * @throws AuthorizationException * @throws AuthenticationException * * @throws Exception * the exception */ @After public void tearDown() throws AuthenticationException, AuthorizationException, TException { // Map<String, String> credentials = new HashMap<String, String>(); // credentials.put(IAuthenticator.USERNAME_KEY, userName); // credentials.put(IAuthenticator.PASSWORD_KEY, password); // CassandraCli.client.login(new AuthenticationRequest(credentials)); // // CassandraCli.dropKeySpace("KunderaAuthentication"); } }
3,654
2,151
<filename>media/gpu/windows/return_on_failure.h // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef MEDIA_GPU_WINDOWS_RETURN_ON_FAILURE_H_ #define MEDIA_GPU_WINDOWS_RETURN_ON_FAILURE_H_ #define RETURN_ON_FAILURE(result, log, ret) \ do { \ if (!(result)) { \ DLOG(ERROR) << log; \ return ret; \ } \ } while (0) #endif // MEDIA_GPU_D3D11_WINDOWS_RETURN_ON_FAILURE_H_
349
348
{"nom":"Soulangy","circ":"3ème circonscription","dpt":"Calvados","inscrits":210,"abs":113,"votants":97,"blancs":10,"nuls":2,"exp":85,"res":[{"nuance":"MDM","nom":"<NAME>","voix":54},{"nuance":"LR","nom":"<NAME>","voix":31}]}
90
12,278
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2020 <NAME> # # Distributed under the Boost Software License, Version 1.0. (See # accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) from generate_unicode_normalization_data import cccs from generate_unicode_normalization_data import expand_decomp_canonical from generate_unicode_normalization_data import get_decompositions from generate_unicode_collation_data import get_frac_uca_cet from generate_unicode_collation_data import ce_to_cpp import re perf_test_form = decls = '''\ // Copyright (C) 2020 <NAME> // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // Warning! This file is autogenerated. #include <boost/text/collate.hpp> #include <boost/text/collation_table.hpp> #include <benchmark/benchmark.h> boost::text::detail::collation_trie_t const & trie() {{ static auto const retval = boost::text::detail::make_default_trie(); return retval; }} boost::text::collation_table const & table() {{ static auto const retval = boost::text::default_collation_table(); return retval; }} {0} BENCHMARK_MAIN() ''' relative_collation_tests_form = '''\ // Copyright (C) 2020 <NAME> // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // Warning! This file is autogenerated. 
#include "collation_tests.hpp" #include <algorithm> #include <gtest/gtest.h> std::array<uint32_t, {1}> const g_cps = {{{{ {0} }}}}; std::array<std::pair<uint32_t, uint32_t>, {3}> const g_cp_ranges = {{{{ {2} }}}}; TEST(collation, relative_{4}_{5}_fcc) {{ std::vector<uint32_t> prev_un_norm; std::vector<uint32_t> prev_cps; boost::text::text_sort_key prev_key; std::vector<uint32_t> curr_un_norm; std::vector<uint32_t> curr_cps; boost::text::text_sort_key curr_key; bool first = true; std::pair<uint32_t, uint32_t> prev_r; for (auto r : g_cp_ranges) {{ curr_un_norm.assign( g_cps.begin() + r.first, g_cps.begin() + r.second); curr_cps.clear(); boost::text::normalize<boost::text::nf::fcc>( curr_un_norm, std::back_inserter(curr_cps)); curr_key = boost::text::collation_sort_key( curr_cps.begin(), curr_cps.end(), table(), boost::text::collation_strength::identical, boost::text::case_first::off, boost::text::case_level::off, boost::text::variable_weighting::{4}); if (!first) {{ EXPECT_LE(compare(prev_key, curr_key), 0) << "prev un-norm cps: " << ce_dumper(prev_un_norm) << "prev_cps (FCC): " << ce_dumper(prev_cps) << "prev_key: " << ce_dumper(prev_key) << "\\n" << "curr un-norm cps: " << ce_dumper(curr_un_norm) << "curr_cps (FCC): " << ce_dumper(curr_cps) << "curr_key: " << ce_dumper(curr_key) << "\\n" ; std::string prev = boost::text::to_string(prev_cps); std::string curr = boost::text::to_string(curr_cps); auto const prev_32 = boost::text::as_utf32(prev); auto const curr_32 = boost::text::as_utf32(curr); EXPECT_LE( boost::text::collate( prev_32.begin(), prev_32.end(), curr_32.begin(), curr_32.end(), table(), boost::text::collation_strength::identical, boost::text::case_first::off, boost::text::case_level::off, boost::text::variable_weighting::{4}), 0) << "prev un-norm cps: " << ce_dumper(prev_un_norm) << "prev_cps (FCC): " << ce_dumper(prev_cps) << "prev_key: " << ce_dumper(prev_key) << "\\n" << "curr un-norm cps: " << ce_dumper(curr_un_norm) << "curr_cps (FCC): " << 
ce_dumper(curr_cps) << "curr_key: " << ce_dumper(curr_key) << "\\n" ; }} std::swap(curr_un_norm, prev_un_norm); std::swap(curr_cps, prev_cps); std::swap(curr_key, prev_key); first = false; prev_r = r; }} }} TEST(collation, relative_{4}_{5}_nfd) {{ std::vector<uint32_t> prev_un_norm; std::vector<uint32_t> prev_cps; boost::text::text_sort_key prev_key; std::vector<uint32_t> curr_un_norm; std::vector<uint32_t> curr_cps; boost::text::text_sort_key curr_key; bool first = true; std::pair<uint32_t, uint32_t> prev_r; for (auto r : g_cp_ranges) {{ curr_un_norm.assign( g_cps.begin() + r.first, g_cps.begin() + r.second); curr_cps.clear(); boost::text::normalize<boost::text::nf::d>( curr_un_norm, std::back_inserter(curr_cps)); curr_key = boost::text::collation_sort_key( curr_cps.begin(), curr_cps.end(), table(), boost::text::collation_strength::identical, boost::text::case_first::off, boost::text::case_level::off, boost::text::variable_weighting::{4}); if (!first) {{ EXPECT_LE(compare(prev_key, curr_key), 0) << "prev un-norm cps: " << ce_dumper(prev_un_norm) << "prev_cps (NFD): " << ce_dumper(prev_cps) << "prev_key: " << ce_dumper(prev_key) << "\\n" << "curr un-norm cps: " << ce_dumper(curr_un_norm) << "curr_cps (NFD): " << ce_dumper(curr_cps) << "curr_key: " << ce_dumper(curr_key) << "\\n" ; std::string prev = boost::text::to_string(prev_cps); std::string curr = boost::text::to_string(curr_cps); auto const prev_32 = boost::text::as_utf32(prev); auto const curr_32 = boost::text::as_utf32(curr); EXPECT_LE( boost::text::collate( prev_32.begin(), prev_32.end(), curr_32.begin(), curr_32.end(), table(), boost::text::collation_strength::identical, boost::text::case_first::off, boost::text::case_level::off, boost::text::variable_weighting::{4}), 0) << "prev un-norm cps: " << ce_dumper(prev_un_norm) << "prev_cps (NFD): " << ce_dumper(prev_cps) << "prev_key: " << ce_dumper(prev_key) << "\\n" << "curr un-norm cps: " << ce_dumper(curr_un_norm) << "curr_cps (NFD): " << ce_dumper(curr_cps) 
<< "curr_key: " << ce_dumper(curr_key) << "\\n" ; }} std::swap(curr_un_norm, prev_un_norm); std::swap(curr_cps, prev_cps); std::swap(curr_key, prev_key); first = false; prev_r = r; }} }} ''' def indices_to_list(indices, all_cps): return all_cps[indices[0]:indices[1]] def generate_lookup_perf_test(ducet): chunk_size = 50 chunks_per_file = 100 chunk_arrays = [] chunk = 0 i = 0 cps = [] cp_ranges = [] for k,v in sorted(ducet.items()): cp_ranges.append((len(cps), len(cps) + len(k))) cps += list(k) i += 1 if i == chunk_size: chunk_arrays.append((cps, cp_ranges)) chunk += 1 i = 0 cps = [] cp_ranges = [] chunk_idx = 0 lines = '' for i in range(len(chunk_arrays)): if i != 0 and i % chunks_per_file == 0: cpp_file = open('collation_element_lookup_perf_{0:03}.cpp'.format(chunk_idx), 'w') cpp_file.write(perf_test_form.format(lines)) chunk_idx += 1 lines = '' cps = chunk_arrays[i][0] cp_ranges = chunk_arrays[i][1] lines += '''\ uint32_t cps_{0:03}[] = {{ {1} }}; void BM_collation_element_lookup_{0:03}(benchmark::State & state) {{ while (state.KeepRunning()) {{ '''.format(i, ', '.join(map(lambda x: type(x) == str and '0x' + x or hex(x), cps)), len(cps)) for first,last in cp_ranges: lines += '''\ benchmark::DoNotOptimize(trie().longest_match(cps_{0:03} + {1}, cps_{0:03} + {2})); '''.format(i, first, last) lines += '''\ }} }} BENCHMARK(BM_collation_element_lookup_{0:03}); '''.format(i) cpp_file = open('collation_element_lookup_perf_{0:03}.cpp'.format(chunk_idx), 'w') cpp_file.write(perf_test_form.format(lines)) def generate_collation_perf_test(ducet): chunk_size = 100 chunks_per_file = 100 chunk_arrays = [] chunk = 0 i = 0 cps = [] cp_ranges = [] for k,v in sorted(ducet.items()): cp_ranges.append((len(cps), len(cps) + len(k))) cps += list(k) i += 1 if i == chunk_size: chunk_arrays.append((cps, cp_ranges)) chunk += 1 i = 0 cps = [] cp_ranges = [] chunk_idx = 0 lines = '' for i in range(len(chunk_arrays)): if i != 0 and i % chunks_per_file == 0: cpp_file = 
open('collation_perf_{0:03}.cpp'.format(chunk_idx), 'w') cpp_file.write(perf_test_form.format(lines)) chunk_idx += 1 lines = '' cps = chunk_arrays[i][0] cp_ranges = chunk_arrays[i][1] lines += '''\ uint32_t cps_{0:03}[] = {{ {1} }}; void BM_collation_{0:03}(benchmark::State & state) {{ while (state.KeepRunning()) {{ '''.format(i, ', '.join(map(lambda x: type(x) == str and '0x' + x or hex(x), cps)), len(cps)) lines += '''\ benchmark::DoNotOptimize(boost::text::collation_sort_key(cps_{0:03}, cps_{0:03} + {2}, table(), boost::text::collation_strength::quaternary, boost::text::case_first::off, boost::text::case_level::off, boost::text::variable_weighting::shifted)); '''.format(i, cp_ranges[0][0], cp_ranges[-1][1]) lines += '''\ }} }} BENCHMARK(BM_collation_{0:03}); '''.format(i) cpp_file = open('collation_perf_{0:03}.cpp'.format(chunk_idx), 'w') cpp_file.write(perf_test_form.format(lines)) collation_elements_regex = re.compile(r'\[([ |0123456789ABCDEF]+)\]') def generate_relative_collation_tests(filename, weighting): lines = open(filename, 'r').readlines() all_cps = [] all_ranges = [] chunk_idx = 0 line_idx = 0 for line in lines: if line_idx == 25000: cps_string = ', '.join(map(lambda x: '0x' + x, all_cps)) ranges_string = ', '.join(map(lambda x: '{{{}, {}}}'.format(x[0], x[1]), all_ranges)) cpp_file = open('relative_collation_test_{0}_{1}.cpp'.format(weighting, chunk_idx), 'w') cpp_file.write(relative_collation_tests_form.format(cps_string, len(all_cps), ranges_string, len(all_ranges), weighting, chunk_idx)) chunk_idx += 1 all_cps = [] all_ranges = [] line_idx = 0 line = line[:-1] if not line.startswith('#') and len(line) != 0: comment_start = line.find('#') comment = '' if comment_start != -1: comment = line[comment_start + 1:].strip() line = line[:comment_start] if 'surrogate' in comment: continue if 'noncharacter' in comment: continue cps = line.split(';')[0].split(' ') first = len(all_cps) all_cps += cps last = len(all_cps) all_ranges.append((first, last)) 
line_idx += 1 if line_idx != 0: cps_string = ', '.join(map(lambda x: '0x' + x, all_cps)) ranges_string = ', '.join(map(lambda x: '{{{}, {}}}'.format(x[0], x[1]), all_ranges)) cpp_file = open('relative_collation_test_{0}_{1}.cpp'.format(weighting, chunk_idx), 'w') cpp_file.write(relative_collation_tests_form.format(cps_string, len(all_cps), ranges_string, len(all_ranges), weighting, chunk_idx)) import sys if '--perf' in sys.argv: cet = get_frac_uca_cet('FractionalUCA.txt') generate_lookup_perf_test(cet) generate_collation_perf_test(cet) exit(0) data_file_test_form = '''\ // Copyright (C) 2020 <NAME> // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // Warning! This file is autogenerated. #include <boost/text/collation_table.hpp> #include <boost/text/collate.hpp> #include <boost/text/normalize_string.hpp> #include <boost/text/data/all.hpp> #include <gtest/gtest.h> using namespace boost; auto const error = [](std::string const & s) {{ std::cout << s; }}; auto const warning = [](std::string const & s) {{}}; {0} ''' single_data_file_test_form = '''\ //{0} TEST(collation_and_tailoring, data_file_test_{1}) {{ auto const table = {2}; {3} }} ''' def make_string(s, rules, view): type_ = 'std::string' if view: type_ += '_view' if rules: retval = '{}(u8R"({})"'.format(type_, s) else: retval = '{}(u8"{}"'.format(type_, s) if view and 'x00' in s: if rules: retval += ', {}'.format(len(s)) else: retval += ', {}'.format(len(eval("'" + s + "'"))) retval += ')' return retval def comparison_tests(compares, test_strength): retval = '' prev = '' for c in compares: (compare, curr) = filter(lambda x: len(x) != 0, c[0].split(' ')) if compare == '=': strength = test_strength result = 0 else: result = -1 if compare == '<': strength = test_strength if compare == '<i': strength = 'identical' if compare == '<4': strength = 'quaternary' if compare == '<3': strength = 'tertiary' if compare 
== '<2': strength = 'secondary' if compare == '<1': strength = 'primary' retval += '''\ {{ // {0} # {1} std::string a = {2}; std::string b = {3}; normalize<boost::text::nf::fcc>(a); normalize<boost::text::nf::fcc>(b); EXPECT_EQ(text::collate( text::utf32_range(a), text::utf32_range(b), table, text::collation_strength::{4}), {5}); }} '''.format(c[0], c[1], make_string(prev, False, False), make_string(curr, False, False), strength, result) prev = curr return retval # Expects a file like the collationtest.txt file in ICU. def generate_datafile_collation_tests(lines): test_lines = '' test_idx = 0 line_idx = 0 test_comment = '' rules = '' strength = 'tertiary' compares = [] skip_rules = False while line_idx < len(lines): line = lines[line_idx].strip() if line.startswith('@'): strength = 'tertiary' if line.startswith('#') or len(line) == 0: line_idx += 1 continue elif line.startswith('** test'): test_comment = line[len('** test:'):] line_idx += 1 elif line.startswith('@ root'): skip_rules = False rules = 'default' line_idx += 1 elif line.startswith('@ rules'): skip_rules = False rules = '' line_idx += 1 line = lines[line_idx].strip() while not line.startswith('*') and not line.startswith('%'): comment_start = line.find('#') comment = '' if comment_start != -1: comment = line[comment_start + 1:].strip() line = line[:comment_start].strip() rules += line + ' ' line_idx += 1 if len(lines) <= line_idx: break line = lines[line_idx].strip() if 'import' in rules or '[reorder others]' in rules or \ 'maxVariable' in rules or '@' in rules: rules = '' skip_rules = True elif line.startswith('@ locale'): line_idx += 1 skip_rules = True elif line.startswith('% '): if line[2:].startswith('reorder '): if 'default' in line: rules = 'default' else: if rules == 'default': rules = '' rules += '[' + line[2:] + ']' else: if rules == 'default': rules = '' if 'strength=primary' in line: rules += '[strength 1]' strength = 'primary' elif 'strength=secondary' in line: rules += '[strength 2]' 
strength = 'secondary' elif 'strength=tertiary' in line: rules += '[strength 3]' strength = 'tertiary' elif 'strength=quaternary' in line: rules += '[strength 4]' strength = 'quaternary' elif 'strength=identical' in line: rules += '[strength I]' strength = 'identical' elif 'backwards=on' in line: rules += '[backwards 2]' else: rules += '[' + line[2:].replace('=', ' ') + ']' line_idx += 1 if 'numeric' in rules or 'maxVariable' in rules or 'import' in rules: rules = '' skip_rules = True elif line.startswith('* compare'): line_idx += 1 compares = [] line = lines[line_idx].strip() while not line.startswith('*') and not line.startswith('@') and not line.startswith('%'): comment_start = line.find('#') comment = '' if comment_start != -1: comment = line[comment_start + 1:].strip() line = line[:comment_start].strip() if line != '' and '\\ud800' not in line.lower() \ and '\\ufff' not in line.lower() \ and '\\udb' not in line.lower() and '\\udc' not in line.lower() \ and '\\udf' not in line.lower(): compares.append((line, comment)) line_idx += 1 if len(lines) <= line_idx: break line = lines[line_idx].strip() table = 'text::default_collation_table()' if len(compares) == 0: test_idx += 1 if skip_rules or len(compares) == 0 or '\\ud800' in rules.lower() \ or '\\udb' in rules.lower() or '\\udc' in rules.lower() \ or '\\udf' in rules.lower(): continue if rules != 'default' and rules != '': table = '''text::tailored_collation_table( {0}, "rules", error, warning)'''.format( make_string(rules.replace(';', '<<').replace(',', '<<<'), True, True) ) test_lines += single_data_file_test_form.format( test_comment, test_idx, table, comparison_tests(compares, strength) ) test_idx += 1 else: line_idx += 1 f = open('tailoring_data.cpp', 'w') f.write(data_file_test_form.format(test_lines)) if '--file' in sys.argv: file_idx = sys.argv.index('--file') + 1 lines = open(sys.argv[file_idx], 'r').readlines() generate_datafile_collation_tests(lines) exit(0) 
generate_relative_collation_tests('CollationTest_CLDR_NON_IGNORABLE.txt', 'non_ignorable') generate_relative_collation_tests('CollationTest_CLDR_SHIFTED.txt', 'shifted')
10,562
465
package com.ansel.dao;

import java.util.List;

import org.springframework.data.jpa.repository.JpaRepository;

import com.ansel.bean.ManageFee;

/**
 * Spring Data JPA repository for {@link ManageFee} entities.
 *
 * Query methods are derived from their names by Spring Data, so renaming a
 * parameter does not change the generated query.
 */
public interface IManageFeeDao extends JpaRepository<ManageFee, Long> {

	/**
	 * Finds all management-fee records for the given payout month.
	 *
	 * @param payoutMonth the payout month to match (format as stored in the
	 *                    {@code payoutMonth} column — presumably "yyyy-MM";
	 *                    TODO confirm against the entity mapping)
	 * @return all matching records, empty list if none
	 */
	List<ManageFee> findByPayoutMonth(String payoutMonth);

	/**
	 * Finds a single record by its numeric id.
	 *
	 * NOTE(review): this overloads the inherited {@code findById(Long)} from
	 * {@link JpaRepository} (which returns an {@code Optional}); callers using
	 * an {@code int} id get this variant, returning {@code null} when absent.
	 *
	 * @param id the record id
	 * @return the matching record, or {@code null} if none exists
	 */
	ManageFee findById(int id);
}
119
807
<filename>Master/ThirdParty/x360ce/branches/v4.x/x360ce/InputHook/HookCOM.h
#pragma once

#include "Common.h"
#include "Logger.h"
#include "Utils.h"

// Use the C-style COM interface macros (lpVtbl access) so individual vtable
// methods can be detoured below.
#define CINTERFACE
#define _WIN32_DCOM
#include <wbemidl.h>
#include <ole2.h>
#include <oleauto.h>
#include <dinput.h>

class InputHookDevice;
class InputHook;

// Static-only holder for the COM/WMI API hooks used by the x360ce input hook.
// Naming convention visible in this header:
//   True*  — pointers saved to the original (unhooked) functions/methods.
//   Hook*  — the detour implementations that replace them.
// The hooks cover both the plain COM entry points (CoCreateInstance & co.)
// and the IWbemLocator/IWbemServices/IEnumWbemClassObject/IWbemClassObject
// method chain used to enumerate input devices via WMI.
class HookCOM
{
private:
	// InputHook installs/uninstalls these hooks, so it needs access to the
	// private function pointers.
	friend class InputHook;

	// --- Saved originals for the global COM API functions. ---
	static void (WINAPI *TrueCoUninitialize)();
	static HRESULT(WINAPI *TrueCoCreateInstance)(REFCLSID rclsid, LPUNKNOWN pUnkOuter, DWORD dwClsContext, REFIID riid, LPVOID FAR* ppv);
	static HRESULT(WINAPI *TrueCoCreateInstanceEx)(REFCLSID Clsid, IUnknown * punkOuter, DWORD dwClsCtx, COSERVERINFO * pServerInfo, DWORD dwCount, MULTI_QI * pResults);
	static HRESULT(WINAPI *TrueCoGetClassObject)(REFCLSID rclsid, DWORD dwClsContext, LPVOID pvReserved, REFIID riid, LPVOID FAR * ppv);

	// --- Saved originals for the WMI interface methods (True* variants). ---
	static HRESULT(STDMETHODCALLTYPE *TrueConnectServer)(IWbemLocator * This, const BSTR strNetworkResource, const BSTR strUser,
		const BSTR strPassword, const BSTR strLocale, long lSecurityFlags, const BSTR strAuthority,
		IWbemContext *pCtx, IWbemServices **ppNamespace);
	static HRESULT(STDMETHODCALLTYPE *TrueCreateInstanceEnum)(IWbemServices * This, const BSTR strFilter, long lFlags,
		IWbemContext *pCtx, IEnumWbemClassObject **ppEnum);
	static HRESULT(STDMETHODCALLTYPE *TrueNext)(IEnumWbemClassObject * This, long lTimeout, ULONG uCount,
		IWbemClassObject **apObjects, ULONG *puReturned);
	static HRESULT(STDMETHODCALLTYPE *TrueGet)(IWbemClassObject * This, LPCWSTR wszName, long lFlags,
		VARIANT *pVal, CIMTYPE *pType, long *plFlavor);

	// --- Detour implementations for the WMI interface methods. ---
	// HookGet is where per-property values read from a WMI object can be
	// rewritten (see DeviceStringChange below).
	static HRESULT STDMETHODCALLTYPE HookGet(IWbemClassObject * This, LPCWSTR wszName, long lFlags,
		VARIANT *pVal, CIMTYPE *pType, long *plFlavor);
	static HRESULT STDMETHODCALLTYPE HookNext(IEnumWbemClassObject * This, long lTimeout, ULONG uCount,
		IWbemClassObject **apObjects, ULONG *puReturned);
	static HRESULT STDMETHODCALLTYPE HookCreateInstanceEnum(IWbemServices * This, const BSTR strFilter, long lFlags,
		IWbemContext *pCtx, IEnumWbemClassObject **ppEnum);
	static HRESULT STDMETHODCALLTYPE HookConnectServer(IWbemLocator * This, const BSTR strNetworkResource, const BSTR strUser,
		const BSTR strPassword, const BSTR strLocale, long lSecurityFlags, const BSTR strAuthority,
		IWbemContext *pCtx, IWbemServices **ppNamespace);

	// --- Detour implementations for the global COM API functions. ---
	static HRESULT WINAPI HookCoCreateInstanceEx(REFCLSID Clsid, IUnknown * punkOuter, DWORD dwClsCtx,
		COSERVERINFO * pServerInfo, DWORD dwCount, MULTI_QI * pResults);
	static HRESULT WINAPI HookCoCreateInstance(REFCLSID rclsid, LPUNKNOWN pUnkOuter, DWORD dwClsContext, REFIID riid, LPVOID FAR* ppv);
	static HRESULT WINAPI HookCoGetClassObject(REFCLSID rclsid, DWORD dwClsContext, LPVOID pvReserved, REFIID riid, LPVOID FAR * ppv);
	static void WINAPI HookCoUninitialize();

	// Rewrites a device identification string (in *pVal) for the given hooked
	// device. NOTE(review): exact rewrite rules live in the .cpp — presumably
	// this substitutes spoofed VID/PID strings; confirm in HookCOM.cpp.
	static void DeviceStringChange(VARIANT *pVal, InputHookDevice* pInputHookDevice, const wchar_t* pNamespace);

	// --- Second set of method-pointer slots (no True prefix). ---
	// NOTE(review): presumably these hold the vtable entries as captured at
	// hook-install time (distinct from the True* originals above); confirm
	// their role in HookCOM.cpp before relying on the distinction.
	static HRESULT(STDMETHODCALLTYPE *ConnectServer)(IWbemLocator * This, const BSTR strNetworkResource, const BSTR strUser,
		const BSTR strPassword, const BSTR strLocale, long lSecurityFlags, const BSTR strAuthority,
		IWbemContext *pCtx, IWbemServices **ppNamespace);
	static HRESULT(STDMETHODCALLTYPE *CreateInstanceEnum)(IWbemServices * This, const BSTR strFilter, long lFlags,
		IWbemContext *pCtx, IEnumWbemClassObject **ppEnum);
	static HRESULT(STDMETHODCALLTYPE *Next)(IEnumWbemClassObject * This, long lTimeout, ULONG uCount,
		IWbemClassObject **apObjects, ULONG *puReturned);
	static HRESULT(STDMETHODCALLTYPE *Get)(IWbemClassObject * This, LPCWSTR wszName, long lFlags,
		VARIANT *pVal, CIMTYPE *pType, long *plFlavor);
};
1,332
575
<reponame>iridium-browser/iridium-browser<filename>chrome/android/java/src/org/chromium/chrome/browser/ChromeBackupAgentImpl.java
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser;

import android.app.backup.BackupDataInput;
import android.app.backup.BackupDataOutput;
import android.app.backup.BackupManager;
import android.content.SharedPreferences;
import android.os.ParcelFileDescriptor;

import androidx.annotation.IntDef;
import androidx.annotation.VisibleForTesting;

import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
import org.chromium.base.PathUtils;
import org.chromium.base.annotations.NativeMethods;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.task.PostTask;
import org.chromium.chrome.browser.base.SplitCompatApplication;
import org.chromium.chrome.browser.firstrun.FirstRunStatus;
import org.chromium.chrome.browser.init.AsyncInitTaskRunner;
import org.chromium.chrome.browser.init.ChromeBrowserInitializer;
import org.chromium.chrome.browser.preferences.ChromePreferenceKeys;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.signin.services.IdentityServicesProvider;
import org.chromium.components.signin.AccountManagerFacadeProvider;
import org.chromium.components.signin.AccountUtils;
import org.chromium.components.signin.base.CoreAccountInfo;
import org.chromium.components.signin.identitymanager.ConsentLevel;
import org.chromium.content_public.browser.UiThreadTaskTraits;
import org.chromium.content_public.common.ContentProcessInfo;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Backup agent for Chrome, using Android key/value backup.
 *
 * Backs up a fixed allowlist of Android SharedPreferences booleans, a set of
 * boolean native prefs (read over JNI), and the signed-in account's email;
 * restore only proceeds before first run and only when that account still
 * exists on the device.
 */
@SuppressWarnings("UseSharedPreferencesManagerFromChromeCheck")
public class ChromeBackupAgentImpl extends ChromeBackupAgent.Impl {
    // Key prefixes distinguishing Android SharedPreferences entries from
    // native (JNI-read) prefs in the backup payload.
    private static final String ANDROID_DEFAULT_PREFIX = "AndroidDefault.";
    private static final String NATIVE_PREF_PREFIX = "native.";

    private static final String TAG = "ChromeBackupAgent";

    @VisibleForTesting
    static final String HISTOGRAM_ANDROID_RESTORE_RESULT = "Android.RestoreResult";

    // Restore status is used to pass the result of any restore to Chrome's first run, so that
    // it can be recorded as a histogram.
    @IntDef({RestoreStatus.NO_RESTORE, RestoreStatus.RESTORE_COMPLETED,
            RestoreStatus.RESTORE_AFTER_FIRST_RUN, RestoreStatus.BROWSER_STARTUP_FAILED,
            RestoreStatus.NOT_SIGNED_IN, RestoreStatus.RESTORE_STATUS_RECORDED})
    @Retention(RetentionPolicy.SOURCE)
    public @interface RestoreStatus {
        // Values must match those in histogram.xml AndroidRestoreResult.
        int NO_RESTORE = 0;
        int RESTORE_COMPLETED = 1;
        int RESTORE_AFTER_FIRST_RUN = 2;
        int BROWSER_STARTUP_FAILED = 3;
        int NOT_SIGNED_IN = 4;
        int NUM_ENTRIES = 5;

        // Set RESTORE_STATUS_RECORDED when the histogram has been recorded; so that it is only
        // recorded once.
        int RESTORE_STATUS_RECORDED = 5;
    }

    private static final String RESTORE_STATUS = "android_restore_status";

    // Keep track of backup failures, so that we give up in the end on persistent problems.
    @VisibleForTesting
    static final String BACKUP_FAILURE_COUNT = "android_backup_failure_count";
    @VisibleForTesting
    static final int MAX_BACKUP_FAILURES = 5;

    // List of preferences that should be restored unchanged.
    static final String[] BACKUP_ANDROID_BOOL_PREFS = {
            ChromePreferenceKeys.DATA_REDUCTION_ENABLED,
            ChromePreferenceKeys.FIRST_RUN_CACHED_TOS_ACCEPTED,
            ChromePreferenceKeys.FIRST_RUN_FLOW_COMPLETE,
            ChromePreferenceKeys.FIRST_RUN_LIGHTWEIGHT_FLOW_COMPLETE,
            ChromePreferenceKeys.FIRST_RUN_FLOW_SIGNIN_SETUP,
            ChromePreferenceKeys.PRIVACY_METRICS_REPORTING,
    };

    // Key used to store the email of the signed in account. This email is obtained from
    // IdentityManager during the backup.
    static final String SIGNED_IN_ACCOUNT_KEY = "google.services.username";

    // Timeout for running the background tasks, needs to be quite long since they may be doing
    // network access, but must be less than the 1 minute restore timeout to be useful.
    private static final long BACKGROUND_TASK_TIMEOUT_SECS = 20;

    /**
     * Class to save and restore the backup state, used to decide if backups are needed. Since the
     * backup data is small, and stored as private data by the backup service, this can simply store
     * and compare a copy of the data.
     */
    private static final class BackupState {
        private ArrayList<String> mNames;
        private ArrayList<byte[]> mValues;

        @SuppressWarnings("unchecked")
        public BackupState(ParcelFileDescriptor parceledState) throws IOException {
            if (parceledState == null) return;
            try {
                FileInputStream instream = new FileInputStream(parceledState.getFileDescriptor());
                ObjectInputStream in = new ObjectInputStream(instream);
                mNames = (ArrayList<String>) in.readObject();
                mValues = (ArrayList<byte[]>) in.readObject();
            } catch (ClassNotFoundException e) {
                throw new RuntimeException(e);
            }
        }

        public BackupState(ArrayList<String> names, ArrayList<byte[]> values) {
            mNames = names;
            mValues = values;
        }

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof BackupState)) return false;
            BackupState otherBackupState = (BackupState) other;
            return mNames.equals(otherBackupState.mNames)
                    && Arrays.deepEquals(mValues.toArray(), otherBackupState.mValues.toArray());
        }

        public void save(ParcelFileDescriptor parceledState) throws IOException {
            FileOutputStream outstream = new FileOutputStream(parceledState.getFileDescriptor());
            ObjectOutputStream out = new ObjectOutputStream(outstream);
            out.writeObject(mNames);
            out.writeObject(mValues);
        }
    }

    @VisibleForTesting
    protected boolean accountExistsOnDevice(String userName) {
        return AccountUtils.findAccountByName(
                       AccountManagerFacadeProvider.getInstance().tryGetGoogleAccounts(), userName)
                != null;
    }

    // TODO (aberent) Refactor the tests to use a mocked ChromeBrowserInitializer, and make this
    // private again.
    @VisibleForTesting
    boolean initializeBrowser() {
        // Workaround for https://crbug.com/718166. The backup agent is sometimes being started in a
        // child process, before the child process loads its native library. If backup then loads
        // the native library the child process is left in a very confused state and crashes.
        if (ContentProcessInfo.inChildProcess()) {
            Log.e(TAG, "Backup agent started from child process");
            return false;
        }
        ChromeBrowserInitializer.getInstance().handleSynchronousStartup();
        return true;
    }

    // Serializes a boolean as a one-byte array for the backup payload.
    private static byte[] booleanToBytes(boolean value) {
        return value ? new byte[] {1} : new byte[] {0};
    }

    // Inverse of booleanToBytes; any non-zero first byte reads as true.
    private static boolean bytesToBoolean(byte[] bytes) {
        return bytes[0] != 0;
    }

    /**
     * Performs a key/value backup. Reads the native boolean prefs and the sync
     * account on the UI thread, adds the allowlisted Android boolean prefs and
     * the account email, and writes the data only when it differs from the
     * state saved in {@code oldState}. On native-pref read failure, schedules a
     * retry (giving up after {@link #MAX_BACKUP_FAILURES} attempts).
     */
    @Override
    public void onBackup(ParcelFileDescriptor oldState, BackupDataOutput data,
            ParcelFileDescriptor newState) throws IOException {
        final ArrayList<String> backupNames = new ArrayList<>();
        final ArrayList<byte[]> backupValues = new ArrayList<>();

        final AtomicReference<CoreAccountInfo> syncAccount = new AtomicReference<>();

        // The native preferences can only be read on the UI thread.
        Boolean nativePrefsRead = PostTask.runSynchronously(UiThreadTaskTraits.DEFAULT, () -> {
            // Start the browser if necessary, so that Chrome can access the native
            // preferences. Although Chrome requests the backup, it doesn't happen
            // immediately, so by the time it does Chrome may not be running.
            if (!initializeBrowser()) return false;

            syncAccount.set(IdentityServicesProvider.get()
                                    .getIdentityManager(Profile.getLastUsedRegularProfile())
                                    .getPrimaryAccountInfo(ConsentLevel.SYNC));

            String[] nativeBackupNames = ChromeBackupAgentImplJni.get().getBoolBackupNames(this);
            boolean[] nativeBackupValues =
                    ChromeBackupAgentImplJni.get().getBoolBackupValues(this);
            assert nativeBackupNames.length == nativeBackupValues.length;

            for (String name : nativeBackupNames) {
                backupNames.add(NATIVE_PREF_PREFIX + name);
            }
            for (boolean val : nativeBackupValues) {
                backupValues.add(booleanToBytes(val));
            }
            return true;
        });

        SharedPreferences sharedPrefs = ContextUtils.getAppSharedPreferences();
        if (!nativePrefsRead) {
            // Something went wrong reading the native preferences, skip the backup, but try again
            // later.
            int backupFailureCount = sharedPrefs.getInt(BACKUP_FAILURE_COUNT, 0) + 1;
            if (backupFailureCount >= MAX_BACKUP_FAILURES) {
                // Too many re-tries, give up and force an unconditional backup next time one is
                // requested.
                return;
            }
            sharedPrefs.edit().putInt(BACKUP_FAILURE_COUNT, backupFailureCount).apply();
            if (oldState != null) {
                try {
                    // Copy the old state to the new state, so that next time Chrome only does a
                    // backup if necessary.
                    BackupState state = new BackupState(oldState);
                    state.save(newState);
                } catch (Exception e) {
                    // There was no old state, or it was corrupt; leave the newState unwritten,
                    // hence forcing an unconditional backup on the next attempt.
                }
            }
            // Ask Android to schedule a retry.
            new BackupManager(getBackupAgent()).dataChanged();
            return;
        }

        // The backup is going to work, clear the failure count.
        sharedPrefs.edit().remove(BACKUP_FAILURE_COUNT).apply();

        // Add the Android boolean prefs.
        for (String prefName : BACKUP_ANDROID_BOOL_PREFS) {
            if (sharedPrefs.contains(prefName)) {
                backupNames.add(ANDROID_DEFAULT_PREFIX + prefName);
                backupValues.add(booleanToBytes(sharedPrefs.getBoolean(prefName, false)));
            }
        }
        // Finally add the user id.
        backupNames.add(ANDROID_DEFAULT_PREFIX + SIGNED_IN_ACCOUNT_KEY);
        backupValues.add(ApiCompatibilityUtils.getBytesUtf8(
                syncAccount.get() == null ? "" : syncAccount.get().getEmail()));

        BackupState newBackupState = new BackupState(backupNames, backupValues);

        // Check if a backup is actually needed.
        try {
            BackupState oldBackupState = new BackupState(oldState);
            if (newBackupState.equals(oldBackupState)) {
                Log.i(TAG, "Nothing has changed since the last backup. Backup skipped.");
                newBackupState.save(newState);
                return;
            }
        } catch (IOException e) {
            // This will happen if Chrome has never written backup data, or if the backup status is
            // corrupt. Create a new backup in either case.
            Log.i(TAG, "Can't read backup status file");
        }

        // Write the backup data
        for (int i = 0; i < backupNames.size(); i++) {
            data.writeEntityHeader(backupNames.get(i), backupValues.get(i).length);
            data.writeEntityData(backupValues.get(i), backupValues.get(i).length);
        }

        // Remember the backup state.
        newBackupState.save(newState);

        Log.i(TAG, "Backup complete");
    }

    /**
     * Performs a restore of the key/value backup data. Bails out (recording a
     * {@link RestoreStatus}) if first run already completed, if browser startup
     * fails, or if the backed-up account does not exist on this device;
     * otherwise restores the native and allowlisted Android prefs and stores
     * the account name for the silent first-run sign-in.
     */
    @Override
    public void onRestore(BackupDataInput data, int appVersionCode, ParcelFileDescriptor newState)
            throws IOException {
        // TODO(aberent) Check that this is not running on the UI thread. Doing so, however, makes
        // testing difficult since the test code runs on the UI thread.

        // Check that the user hasn't already seen FRE (not sure if this can ever happen, but if it
        // does then restoring the backup will overwrite the user's choices).
        SharedPreferences sharedPrefs = ContextUtils.getAppSharedPreferences();
        if (FirstRunStatus.getFirstRunFlowComplete()
                || FirstRunStatus.getLightweightFirstRunFlowComplete()) {
            setRestoreStatus(RestoreStatus.RESTORE_AFTER_FIRST_RUN);
            Log.w(TAG, "Restore attempted after first run");
            return;
        }

        final ArrayList<String> backupNames = new ArrayList<>();
        final ArrayList<byte[]> backupValues = new ArrayList<>();

        String restoredUserName = null;
        while (data.readNextHeader()) {
            String key = data.getKey();
            int dataSize = data.getDataSize();
            byte[] buffer = new byte[dataSize];
            data.readEntityData(buffer, 0, dataSize);
            if (key.equals(ANDROID_DEFAULT_PREFIX + SIGNED_IN_ACCOUNT_KEY)) {
                restoredUserName = new String(buffer);
            } else {
                backupNames.add(key);
                backupValues.add(buffer);
            }
        }

        // Start and wait for the Async init tasks. This loads the library, and attempts to load the
        // first run variations seed. Since these are both slow it makes sense to run them in
        // parallel as Android AsyncTasks, reusing some of Chrome's async startup logic.
        //
        // Note that this depends on onRestore being run from a background thread, since
        // if it were called from the UI thread the broadcast would not be received until after it
        // exited.
        final CountDownLatch latch = new CountDownLatch(1);
        PostTask.runSynchronously(UiThreadTaskTraits.DEFAULT, () -> {
            // Chrome library loading depends on PathUtils.
            PathUtils.setPrivateDataDirectorySuffix(
                    SplitCompatApplication.PRIVATE_DATA_DIRECTORY_SUFFIX);
            createAsyncInitTaskRunner(latch).startBackgroundTasks(
                    false /* allocateChildConnection */, true /* initVariationSeed */);
        });

        try {
            // Ignore result. It will only be false if it times out. Problems with fetching the
            // variation seed can be ignored, and other problems will either recover or be repeated
            // when Chrome is started synchronously.
            latch.await(BACKGROUND_TASK_TIMEOUT_SECS, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // Should never happen, but can be ignored (as explained above) anyway.
        }

        // Chrome has to be running before it can check if the account exists. Because the native
        // library is already loaded Chrome startup should be fast.
        boolean browserStarted = PostTask.runSynchronously(UiThreadTaskTraits.DEFAULT, () -> {
            // Start the browser if necessary.
            return initializeBrowser();
        });
        if (!browserStarted) {
            // Something went wrong starting Chrome, skip the restore.
            setRestoreStatus(RestoreStatus.BROWSER_STARTUP_FAILED);
            return;
        }

        // If the user hasn't signed in, or can't sign in, then don't restore anything.
        if (restoredUserName == null || !accountExistsOnDevice(restoredUserName)) {
            setRestoreStatus(RestoreStatus.NOT_SIGNED_IN);
            Log.i(TAG, "Chrome was not signed in with a known account name, not restoring");
            return;
        }

        // Restore the native preferences on the UI thread
        PostTask.runSynchronously(UiThreadTaskTraits.DEFAULT, () -> {
            ArrayList<String> nativeBackupNames = new ArrayList<>();
            boolean[] nativeBackupValues = new boolean[backupNames.size()];
            int count = 0;
            int prefixLength = NATIVE_PREF_PREFIX.length();
            for (int i = 0; i < backupNames.size(); i++) {
                String name = backupNames.get(i);
                if (name.startsWith(NATIVE_PREF_PREFIX)) {
                    nativeBackupNames.add(name.substring(prefixLength));
                    nativeBackupValues[count] = bytesToBoolean(backupValues.get(i));
                    count++;
                }
            }
            ChromeBackupAgentImplJni.get().setBoolBackupPrefs(this,
                    nativeBackupNames.toArray(new String[count]),
                    Arrays.copyOf(nativeBackupValues, count));
        });

        // Now that everything looks good so restore the Android preferences.
        SharedPreferences.Editor editor = sharedPrefs.edit();

        // Only restore preferences that we know about.
        int prefixLength = ANDROID_DEFAULT_PREFIX.length();
        for (int i = 0; i < backupNames.size(); i++) {
            String name = backupNames.get(i);
            if (name.startsWith(ANDROID_DEFAULT_PREFIX)
                    && Arrays.asList(BACKUP_ANDROID_BOOL_PREFS)
                               .contains(name.substring(prefixLength))) {
                editor.putBoolean(
                        name.substring(prefixLength), bytesToBoolean(backupValues.get(i)));
            }
        }

        // Because FirstRunSignInProcessor.FIRST_RUN_FLOW_SIGNIN_COMPLETE is not restored Chrome
        // will sign in the user on first run to the account in FIRST_RUN_FLOW_SIGNIN_ACCOUNT_NAME
        // if any. If the rest of FRE has been completed this will happen silently.
        editor.putString(
                ChromePreferenceKeys.FIRST_RUN_FLOW_SIGNIN_ACCOUNT_NAME, restoredUserName);
        editor.apply();

        // The silent first run will change things, so there is no point in trying to prevent
        // additional backups at this stage. Don't write anything to |newState|.
        setRestoreStatus(RestoreStatus.RESTORE_COMPLETED);
        Log.i(TAG, "Restore complete");
    }

    // Overridable factory hook so tests can supply a fake init task runner.
    @VisibleForTesting
    AsyncInitTaskRunner createAsyncInitTaskRunner(final CountDownLatch latch) {
        return new AsyncInitTaskRunner() {
            @Override
            protected void onSuccess() {
                latch.countDown();
            }

            @Override
            protected void onFailure(Exception failureCause) {
                // Ignore failure. Problems with the variation seed can be ignored, and other
                // problems will either recover or be repeated when Chrome is started synchronously.
                latch.countDown();
            }
        };
    }

    /**
     * Get the saved result of any restore that may have happened.
     *
     * @return the restore status, a RestoreStatus value.
     */
    @VisibleForTesting
    @RestoreStatus
    static int getRestoreStatus() {
        return ContextUtils.getAppSharedPreferences().getInt(
                RESTORE_STATUS, RestoreStatus.NO_RESTORE);
    }

    /**
     * Save the restore status for later transfer to a histogram.
     *
     * @param status the status.
     */
    @VisibleForTesting
    static void setRestoreStatus(@RestoreStatus int status) {
        ContextUtils.getAppSharedPreferences().edit().putInt(RESTORE_STATUS, status).apply();
    }

    /**
     * Record the restore histogram. To be called from Chrome itself once it is running.
     */
    public static void recordRestoreHistogram() {
        @RestoreStatus
        int restoreStatus = getRestoreStatus();
        // Ensure restore status is only recorded once
        if (restoreStatus != RestoreStatus.RESTORE_STATUS_RECORDED) {
            RecordHistogram.recordEnumeratedHistogram(
                    HISTOGRAM_ANDROID_RESTORE_RESULT, restoreStatus, RestoreStatus.NUM_ENTRIES);
            setRestoreStatus(RestoreStatus.RESTORE_STATUS_RECORDED);
        }
    }

    // JNI bridge to the native pref accessors; implemented by the generated
    // ChromeBackupAgentImplJni class.
    @NativeMethods
    interface Natives {
        String[] getBoolBackupNames(ChromeBackupAgentImpl caller);
        boolean[] getBoolBackupValues(ChromeBackupAgentImpl caller);
        void setBoolBackupPrefs(
                ChromeBackupAgentImpl caller, String[] name, boolean[] value);
    }
}
8,400
1,738
<reponame>BadDevCode/lumberyard
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/

#include <Tests/SystemComponentFixture.h>
#include <MCore/Source/CommandGroup.h>
#include <EMotionFX/Source/MotionEventTable.h>
#include <EMotionFX/Source/SkeletalMotion.h>
#include <EMotionFX/CommandSystem/Source/CommandManager.h>
#include <EMotionFX/CommandSystem/Source/MotionEventCommands.h>

namespace EMotionFX
{
    using MotionEventCommandTests = SystemComponentFixture;

    // Verifies that CommandRemoveEventTrack removes a motion event track and
    // that the removal can be undone (restoring the track at its original
    // index, with its name intact) and redone through the CommandManager.
    TEST_F(MotionEventCommandTests, RemoveMotionEventTrackCommandTest)
    {
        AZStd::string result;
        CommandSystem::CommandManager commandManager;
        // NOTE(review): commandGroup appears unused in this test — candidate
        // for removal.
        MCore::CommandGroup commandGroup;

        SkeletalMotion* motion = SkeletalMotion::Create("SkeletalMotion1");
        MotionEventTable* eventTable = motion->GetEventTable();
        // Some of the motion event related commands automatically create sync tracks.
        // This would make data verification harder, so we just manually create it upfront.
        eventTable->AutoCreateSyncTrack(motion);

        // Track 0 is the sync track created above; this adds a second track.
        const char* eventTrackName = "EventTrack1";
        MotionEventTrack* eventTrack = aznew MotionEventTrack(eventTrackName, motion);
        eventTable->AddTrack(eventTrack);
        EXPECT_EQ(eventTable->GetNumTracks(), 2);

        // Remove the track at index 0, then exercise undo and redo.
        CommandSystem::CommandRemoveEventTrack(motion, 0);
        EXPECT_EQ(eventTable->GetNumTracks(), 1);

        EXPECT_TRUE(commandManager.Undo(result)) << result.c_str();
        EXPECT_EQ(eventTable->GetNumTracks(), 2);
        EXPECT_EQ(eventTable->GetTrack(1)->GetNameString(), eventTrackName);

        EXPECT_TRUE(commandManager.Redo(result)) << result.c_str();
        EXPECT_EQ(eventTable->GetNumTracks(), 1);

        motion->Destroy();
    }
} // namespace EMotionFX
748
625
package javatest.sleep;

import com.jtransc.io.JTranscConsole;

import jtransc.jtransc.nativ.JTranscJsNativeMixedTest;

/**
 * Smoke test for Thread.sleep: sleeps 100 ms and prints whether at least
 * ~100 ms of wall-clock time actually elapsed.
 */
public class SleepTest {
	static public void main(String[] args) {
		JTranscConsole.log(1234560007);
		JTranscConsole.log("SleepTest.main:");

		final long beforeMillis = System.currentTimeMillis();
		try {
			Thread.sleep(100L);
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		final long elapsedMillis = System.currentTimeMillis() - beforeMillis;

		// Compare against 99 rather than 100 — presumably to tolerate
		// millisecond-clock granularity on some targets.
		final boolean sleptLongEnough = elapsedMillis >= 99;
		System.out.println(" - Slept for at least 100ms? " + sleptLongEnough);
	}
}
199
8,456
package redis.clients.jedis.graph.entities; import java.util.Objects; /** * A Graph entity property. Has a name, type, and value * * @param <T> */ public class Property<T> { private final String name; private final T value; public Property(String name, T value) { this.name = name; this.value = value; } public String getName() { return name; } public T getValue() { return value; } private boolean valueEquals(Object value1, Object value2) { if (value1 instanceof Integer) value1 = ((Integer) value1).longValue(); if (value2 instanceof Integer) value2 = ((Integer) value2).longValue(); return Objects.equals(value1, value2); } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof Property)) return false; Property<?> property = (Property<?>) o; return Objects.equals(name, property.name) && valueEquals(value, property.value); } @Override public int hashCode() { return Objects.hash(name, value); } /** * Default toString implementation * * @return */ @Override public String toString() { final StringBuilder sb = new StringBuilder("Property{"); sb.append("name='").append(name).append('\''); sb.append(", value=").append(value); sb.append('}'); return sb.toString(); } }
475
1,127
<reponame>ryanloney/openvino-1
// Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "deconvolution_kernel_selector.h"
#include "deconvolution_kernel_ref.h"
#include "deconvolution_kernel_bfyx_opt.h"
#include "deconvolution_kernel_b_fs_zyx_fsv16.h"
#include "deconvolution_kernel_b_fs_zyx_fsv16_dw.h"
#include "deconvolution_kernel_imad_ref.hpp"
#include "deconvolution_kernel_imad_along_f_tile_bfx.hpp"

namespace kernel_selector {
// Registers every available deconvolution kernel implementation (reference,
// layout-specialized, and IMAD variants) with the selector base class.
deconvolution_kernel_selector::deconvolution_kernel_selector() {
    Attach<DeconvolutionKernelRef>();
    Attach<DeconvolutionKernel_bfyx_opt>();
    Attach<DeconvolutionKernel_b_fs_zyx_fsv16>();
    Attach<DeconvolutionKernel_b_fs_zyx_fsv16_dw>();
    Attach<DeconvolutionKernel_imad_ref>();
    Attach<DeconvolutionKernel_imad_along_f_tile_bfx>();
}

// Delegates to the naive best-kernel strategy over the attached
// implementations, filtered for the DECONVOLUTION kernel type.
KernelsData deconvolution_kernel_selector::GetBestKernels(const Params& params,
                                                          const optional_params& options) const {
    return GetNaiveBestKernel(params, options, KernelType::DECONVOLUTION);
}
}  // namespace kernel_selector
433
17,275
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., <NAME>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.model;

import static hudson.init.InitMilestone.JOB_CONFIG_ADAPTED;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionListListener;
import hudson.ExtensionPoint;
import hudson.init.Initializer;
import hudson.triggers.SafeTimerTask;
import hudson.triggers.Trigger;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import jenkins.util.Timer;

/**
 * Extension point to perform a periodic task in Hudson (through {@link Timer}.)
 *
 * <p>
 * This extension point is useful if your plugin needs to perform some work in the background periodically
 * (for example, monitoring, batch processing, garbage collection, etc.)
 *
 * <p>
 * Put {@link Extension} on your class to have it picked up and registered automatically, or
 * manually insert this to {@link Trigger#timer}.
 *
 * <p>
 * This class is designed to run a short task. Implementations whose periodic work takes a long time
 * to run should extend from {@link AsyncPeriodicWork} instead.
 *
 * @author <NAME>
 * @see AsyncPeriodicWork
 */
@SuppressFBWarnings(value="PREDICTABLE_RANDOM", justification = "The random is just used for an initial delay.")
public abstract class PeriodicWork extends SafeTimerTask implements ExtensionPoint {

    /** @deprecated Use your own logger, or send messages to the logger in {@link AsyncPeriodicWork#execute}. */
    @SuppressWarnings("NonConstantLogger")
    @Deprecated
    protected final Logger logger = Logger.getLogger(getClass().getName());

    /**
     * Gets the number of milliseconds between successive executions.
     *
     * <p>
     * Hudson calls this method once to set up a recurring timer, instead of
     * calling this each time after the previous execution completed. So this class cannot be
     * used to implement a non-regular recurring timer.
     *
     * <p>
     * IOW, the method should always return the same value.
     */
    public abstract long getRecurrencePeriod();

    /**
     * Gets the number of milliseconds til the first execution.
     *
     * <p>
     * By default it chooses the value randomly between 0 and {@link #getRecurrencePeriod()}
     */
    public long getInitialDelay() {
        long l = RANDOM.nextLong();

        // Math.abs(Long.MIN_VALUE)==Long.MIN_VALUE!
        if (l==Long.MIN_VALUE)
            l++;

        return Math.abs(l)%getRecurrencePeriod();
    }

    /**
     * Returns all the registered {@link PeriodicWork}s.
     */
    public static ExtensionList<PeriodicWork> all() {
        return ExtensionList.lookup(PeriodicWork.class);
    }

    // Runs once at startup (after job configs are adapted): schedules every
    // currently-registered PeriodicWork and installs a listener so extensions
    // contributed later by dynamically-loaded plugins get scheduled too.
    @Initializer(after= JOB_CONFIG_ADAPTED)
    public static void init() {
        // start all PeriodicWorks
        ExtensionList<PeriodicWork> extensionList = all();
        extensionList.addListener(new PeriodicWorkExtensionListListener(extensionList));
        for (PeriodicWork p : extensionList) {
            schedulePeriodicWork(p);
        }
    }

    // Registers one PeriodicWork with the shared timer at its own delay/period.
    private static void schedulePeriodicWork(PeriodicWork p) {
        Timer.get().scheduleAtFixedRate(p, p.getInitialDelay(), p.getRecurrencePeriod(), TimeUnit.MILLISECONDS);
    }

    // time constants (all in milliseconds, for use by getRecurrencePeriod implementations)
    protected static final long MIN = 1000*60;
    protected static final long HOUR =60*MIN;
    protected static final long DAY = 24*HOUR;

    private static final Random RANDOM = new Random();

    /**
     * ExtensionListener that will kick off any new AperiodWork extensions from plugins that are dynamically
     * loaded.
     */
    private static class PeriodicWorkExtensionListListener extends ExtensionListListener {

        // Tracks which works have already been scheduled, so onChange only
        // schedules genuinely new extensions.
        private final Set<PeriodicWork> registered = new HashSet<>();

        PeriodicWorkExtensionListListener(ExtensionList<PeriodicWork> initiallyRegistered) {
            registered.addAll(initiallyRegistered);
        }

        @Override
        public void onChange() {
            synchronized (registered) {
                for (PeriodicWork p : PeriodicWork.all()) {
                    // it is possible to programmatically remove Extensions but that is rarely used.
                    if (!registered.contains(p)) {
                        schedulePeriodicWork(p);
                        registered.add(p);
                    }
                }
            }
        }
    }
}
1,896
1,639
<gh_stars>1000+
#include<bits/stdc++.h>
using namespace std;
using ll = long long;
const ll INF = 1e18;

// Persistent (copy-on-write) size-balanced min-heap used to represent the set
// of "sidetrack" edges at each vertex in Eppstein's algorithm. meld() copies
// the nodes it touches, so previously built heaps stay valid after melding —
// nodes are shared between versions and intentionally never freed.
// Each node also carries the (from, to) endpoints of the sidetrack edge.
template<typename T>
struct heap {
    struct node {
        node* ch[2] = {0, 0};   // left/right children
        int sz;                 // subtree size (used to keep the tree balanced)
        T val;                  // key: sidetrack cost (min at the root)
        int from, to;           // edge this entry represents
        node(T val, int from, int to): sz(1), val(val), from(from), to(to) {}
    };
    node* root = 0;
    heap(node* t = 0): root(t) {}

    // Persistent meld: clones a and b before linking, so neither input heap
    // is mutated. Smaller key becomes the root; children are swapped to keep
    // the smaller subtree on the right.
    node* meld(node* a, node* b) {
        if (!b) return a ? new node(*a) : 0;
        if (!a) return b ? new node(*b) : 0;
        a = new node(*a);
        b = new node(*b);
        if (a -> val > b -> val) swap(a, b);
        a -> ch[1] = meld(a -> ch[1], b);
        if (!a -> ch[0] || a -> ch[0] -> sz < a -> ch[1] -> sz) swap(a -> ch[0], a -> ch[1]);
        a -> sz = (a -> ch[1] ? a -> ch[1] -> sz : 0) + 1;
        return a;
    }
    heap meld(heap b) { return heap(meld(root, b.root)); }
    heap insert(T x, int from, int to) { // from and to is needed only for this problem
        return heap(meld(root, new node(x, from, to)));
    }
    heap pop() { return heap(meld(root -> ch[0], root -> ch[1])); }
    T top() { // returns the smallest value of the heap
        return root ? root -> val : T(-1);
    }
    bool empty() { return !root; }
};

// g  : transposed adjacency (edge u->v of the input is stored in g[v]),
//      so a Dijkstra over g from `dest` yields distances *to* dest.
// rg : forward adjacency (outgoing edges), used to collect sidetracks.
// Entry layout: {neighbor, weight, edge id}.
vector<vector<array<int, 3>>> g;
vector<vector<array<int, 3>>> rg;

// Eppstein's k-shortest-walks. Returns the lengths of the k shortest walks
// from src to dest (walks may repeat vertices/edges), padding with -1 when
// fewer than k exist.
vector<ll> Eppstein(int n, int src, int dest, int k) { // 0 indexed, directed graph
    // --- Phase 1: reverse Dijkstra from dest. d[x] = dist(x -> dest),
    // par/idx record the shortest-path tree edge chosen at each vertex.
    vector<ll> d(n, INF);
    vector<int> par(n, -1);
    vector<int> idx(n, -1);
    vector<vector<int>> t(n);   // children lists of the shortest-path tree
    priority_queue<pair<ll, int>, vector<pair<ll, int>>, greater<pair<ll, int>>> pq;
    pq.emplace(0, dest);
    d[dest] = 0;
    while (!pq.empty()) {
        auto [w, u] = pq.top();
        pq.pop();
        for (auto [v, cost, id]: g[u]) {
            if (d[v] > w + cost) {
                d[v] = w + cost;
                par[v] = u;
                idx[v] = id;
                pq.emplace(d[v], v);
            }
        }
    }
    for (int i = 0; i < n; ++i) {
        if (par[i] != -1) t[par[i]].push_back(i);
    }

    // --- Phase 2: build, per vertex u, the persistent heap h[u] of all
    // sidetrack edges on the path u ~> dest. Walking the tree from dest, each
    // vertex melds its parent's heap (persistence makes this sharing cheap)
    // and inserts its own non-tree outgoing edges keyed by the detour cost
    // cost - d[u] + d[v] (the extra length incurred by taking that edge).
    vector<heap<ll>> h(n);
    for (int i = 0; i < n; ++i) h[i] = heap<ll>();
    queue<int> q;
    q.emplace(dest);
    while (!q.empty()) {
        auto u = q.front();
        q.pop();
        if (par[u] != -1) h[u] = h[u].meld(h[par[u]]);
        for (auto [v, cost, id]: rg[u]) {
            if (id != idx[u]) h[u] = h[u].insert(cost - d[u] + d[v], u, v);
        }
        for (auto v:t[u]) {
            q.emplace(v);
        }
    }

    // --- Phase 3: best-first search over the implicit "path graph". Each PQ
    // entry is (walk length, heap node); from a node we can move to its two
    // heap children (alternative sidetracks at the same point, cost delta
    // child->val - cur->val) or start a fresh sidetrack from the endpoint of
    // the current one (cost delta h[cur->to].top()).
    auto comp = [](auto s, auto t) { return get<0>(s) > get<0>(t); };
    priority_queue<tuple<ll, heap<ll>::node*>, vector<tuple<ll, heap<ll>::node*>>, decltype(comp)> Q(comp);
    heap<ll> s;
    s = s.insert(d[src], -1, src);  // sentinel root: the shortest walk itself
    Q.emplace(d[src], s.root);
    vector<ll>ans;
    while (!Q.empty()) {
        auto [w, cur] = Q.top();
        Q.pop();
        if (w >= INF) break;    // unreachable — no further finite walks
        ans.push_back(w);
        if ((ans.size()) == k) break;
        if (cur -> ch[0]) {
            Q.emplace(w + cur -> ch[0] -> val - cur -> val, cur -> ch[0]);
        }
        if (cur -> ch[1]) {
            Q.emplace(w + cur -> ch[1] -> val - cur -> val, cur -> ch[1]);
        }
        if (h[cur -> to].root) {
            Q.emplace(w + h[cur -> to].root -> val, h[cur -> to].root);
        }
    }
    // Pad with -1 when fewer than k walks exist.
    while (ans.size() < k) {
        ans.push_back(-1);
    }
    return ans;
}

// Reads: n m src dest k, then m directed edges "u v w"; prints the k walk
// lengths, one per line (-1 for missing walks).
int32_t main() {
    ios_base::sync_with_stdio(0);
    cin.tie(0);
    int n, m, src, dest, k;
    cin >> n >> m >> src >> dest >> k;
    g.resize(n);
    rg.resize(n);
    for (int i = 0; i < m; ++i) {
        int u, v, w;
        cin >> u >> v >> w;
        g[v].push_back({u, w, i});   // store transposed for the reverse Dijkstra
        rg[u].push_back({v, w, i});  // forward edges for sidetrack collection
    }
    auto ans = Eppstein(n, src, dest, k);
    for (auto x: ans) {
        cout << x << '\n';
    }
    return 0;
}
// https://qiita.com/hotman78/items/42534a01c4bd05ed5e1e
// https://judge.yosupo.jp/problem/k_shortest_walk
1,731
1,420
<gh_stars>1000+ /* * * Copyright 2018 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.genie.agent.execution.services.impl.grpc; import com.netflix.genie.agent.execution.services.AgentFileStreamService; import com.netflix.genie.agent.execution.services.AgentHeartBeatService; import com.netflix.genie.agent.execution.services.AgentJobKillService; import com.netflix.genie.agent.execution.services.AgentJobService; import com.netflix.genie.agent.execution.services.KillService; import com.netflix.genie.agent.properties.AgentProperties; import com.netflix.genie.common.internal.dtos.v4.converters.JobDirectoryManifestProtoConverter; import com.netflix.genie.common.internal.dtos.v4.converters.JobServiceProtoConverter; import com.netflix.genie.common.internal.services.JobDirectoryManifestCreatorService; import com.netflix.genie.proto.FileStreamServiceGrpc; import com.netflix.genie.proto.HeartBeatServiceGrpc; import com.netflix.genie.proto.JobKillServiceGrpc; import com.netflix.genie.proto.JobServiceGrpc; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; import org.springframework.scheduling.TaskScheduler; /** * Spring 
auto configuration for the various gRPC services required for an agent to communicate with the Genie server. * * @author tgianos * @since 4.0.0 */ @Configuration @EnableConfigurationProperties( { AgentProperties.class } ) public class GRpcServicesAutoConfiguration { /** * Provide a lazy gRPC agent heart beat service if one isn't already defined. * * @param heartBeatServiceStub The heart beat service stub to use * @param taskScheduler The task scheduler to use * @param agentProperties The agent properties * @return A {@link GrpcAgentHeartBeatServiceImpl} instance */ @Bean @Lazy @ConditionalOnMissingBean(AgentHeartBeatService.class) public GrpcAgentHeartBeatServiceImpl agentHeartBeatService( final HeartBeatServiceGrpc.HeartBeatServiceStub heartBeatServiceStub, @Qualifier("heartBeatServiceTaskScheduler") final TaskScheduler taskScheduler, final AgentProperties agentProperties ) { return new GrpcAgentHeartBeatServiceImpl( heartBeatServiceStub, taskScheduler, agentProperties.getHeartBeatService() ); } /** * Provide a lazy gRPC agent job kill service bean if one isn't already defined. * * @param jobKillServiceFutureStub The future stub to use for the service communication with the server * @param killService The kill service to use to terminate this agent gracefully * @param taskScheduler The task scheduler to use * @param agentProperties The agent properties * @return A {@link GRpcAgentJobKillServiceImpl} instance */ @Bean @Lazy @ConditionalOnMissingBean(AgentJobKillService.class) public GRpcAgentJobKillServiceImpl agentJobKillService( final JobKillServiceGrpc.JobKillServiceFutureStub jobKillServiceFutureStub, final KillService killService, @Qualifier("sharedAgentTaskScheduler") final TaskScheduler taskScheduler, final AgentProperties agentProperties ) { return new GRpcAgentJobKillServiceImpl( jobKillServiceFutureStub, killService, taskScheduler, agentProperties.getJobKillService() ); } /** * Provide a lazy gRPC agent job service bean if one isn't already defined. 
* * @param jobServiceFutureStub The future stub to use for communication with the server * @param jobServiceProtoConverter The converter to use between DTO and Proto instances * @return A {@link GRpcAgentJobServiceImpl} instance */ @Bean @Lazy @ConditionalOnMissingBean(AgentJobService.class) public GRpcAgentJobServiceImpl agentJobService( final JobServiceGrpc.JobServiceFutureStub jobServiceFutureStub, final JobServiceProtoConverter jobServiceProtoConverter ) { return new GRpcAgentJobServiceImpl(jobServiceFutureStub, jobServiceProtoConverter); } /** * Provide a lazy gRPC agent file stream service if one isn't already defined. * * @param fileStreamServiceStub The stub to use for communications with the server * @param taskScheduler The task scheduler to use * @param jobDirectoryManifestProtoConverter The converter to serialize manifests into messages * @param jobDirectoryManifestCreatorService The job directory manifest service * @param agentProperties The agent properties * @return A {@link AgentFileStreamService} instance */ @Bean @Lazy @ConditionalOnMissingBean(AgentFileStreamService.class) public GRpcAgentFileStreamServiceImpl agentFileStreamService( final FileStreamServiceGrpc.FileStreamServiceStub fileStreamServiceStub, @Qualifier("sharedAgentTaskScheduler") final TaskScheduler taskScheduler, final JobDirectoryManifestProtoConverter jobDirectoryManifestProtoConverter, final JobDirectoryManifestCreatorService jobDirectoryManifestCreatorService, final AgentProperties agentProperties ) { return new GRpcAgentFileStreamServiceImpl( fileStreamServiceStub, taskScheduler, jobDirectoryManifestProtoConverter, jobDirectoryManifestCreatorService, agentProperties.getFileStreamService() ); } }
2,239
30,023
"""The Oncue integration.""" from __future__ import annotations from datetime import timedelta import logging from aiooncue import LoginFailedException, Oncue from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import CONNECTION_EXCEPTIONS, DOMAIN PLATFORMS: list[str] = [Platform.BINARY_SENSOR, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Oncue from a config entry.""" data = entry.data websession = async_get_clientsession(hass) client = Oncue(data[CONF_USERNAME], data[CONF_PASSWORD], websession) try: await client.async_login() except CONNECTION_EXCEPTIONS as ex: raise ConfigEntryNotReady(ex) from ex except LoginFailedException as ex: _LOGGER.error("Failed to login to oncue service: %s", ex) return False coordinator = DataUpdateCoordinator( hass, _LOGGER, name=f"Oncue {entry.data[CONF_USERNAME]}", update_interval=timedelta(minutes=10), update_method=client.async_fetch_all, ) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator hass.config_entries.async_setup_platforms(entry, PLATFORMS) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
696
337
<gh_stars>100-1000 //file class Test { public static int foo(String[] args) { return args.length; } }
41
14,668
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_BROWSER_RENDERER_HOST_PEPPER_PEPPER_VPN_PROVIDER_MESSAGE_FILTER_CHROMEOS_H_ #define CONTENT_BROWSER_RENDERER_HOST_PEPPER_PEPPER_VPN_PROVIDER_MESSAGE_FILTER_CHROMEOS_H_ #include <stdint.h> #include <memory> #include <string> #include <vector> #include "base/callback.h" #include "base/containers/queue.h" #include "base/memory/ref_counted.h" #include "base/memory/weak_ptr.h" #include "content/browser/renderer_host/pepper/browser_ppapi_host_impl.h" #include "content/public/browser/vpn_service_proxy.h" #include "ipc/ipc_message.h" #include "ppapi/c/pp_instance.h" #include "ppapi/c/ppb_vpn_provider.h" #include "ppapi/host/host_message_context.h" #include "ppapi/host/resource_host.h" #include "ppapi/host/resource_message_filter.h" #include "ppapi/shared_impl/vpn_provider_util.h" #include "url/gurl.h" namespace content { class BrowserContext; // The host for PPB_VpnProvider. // Important: The PPB_VpnProvider API is available only on Chrome OS. class PepperVpnProviderMessageFilter : public ppapi::host::ResourceMessageFilter { public: PepperVpnProviderMessageFilter(BrowserPpapiHostImpl* host, PP_Instance instance); PepperVpnProviderMessageFilter(const PepperVpnProviderMessageFilter&) = delete; PepperVpnProviderMessageFilter& operator=( const PepperVpnProviderMessageFilter&) = delete; // ppapi::host::ResourceMessageFilter overrides. scoped_refptr<base::SequencedTaskRunner> OverrideTaskRunnerForMessage( const IPC::Message& message) override; int32_t OnResourceMessageReceived( const IPC::Message& msg, ppapi::host::HostMessageContext* context) override; // PepperVpnProviderResourceHostProxyImpl entry points. 
void SendOnPacketReceived(const std::vector<char>& packet); void SendOnUnbind(); private: using SuccessCallback = base::OnceClosure; using FailureCallback = base::OnceCallback<void(const std::string& error_name, const std::string& error_message)>; ~PepperVpnProviderMessageFilter() override; // Message handlers int32_t OnBind(ppapi::host::HostMessageContext* context, const std::string& configuration_id, const std::string& configuration_name); int32_t OnSendPacket(ppapi::host::HostMessageContext* context, uint32_t packet_size, uint32_t id); int32_t OnPacketReceivedReply(ppapi::host::HostMessageContext* context, uint32_t id); // OnBind helpers int32_t DoBind(SuccessCallback success_callback, FailureCallback failure_callback); void OnBindSuccess(const ppapi::host::ReplyMessageContext& context); void OnBindFailure(const ppapi::host::ReplyMessageContext& context, const std::string& error_name, const std::string& error_message); void OnBindReply(const ppapi::host::ReplyMessageContext& context, int32_t reply); // OnSendPacket helpers int32_t DoSendPacket(const std::vector<char>& packet, SuccessCallback success_callback, FailureCallback failure_callback); void OnSendPacketSuccess(const ppapi::host::ReplyMessageContext& context, uint32_t id); void OnSendPacketFailure(const ppapi::host::ReplyMessageContext& context, uint32_t id, const std::string& error_name, const std::string& error_message); void OnSendPacketReply(const ppapi::host::ReplyMessageContext& context, uint32_t id); // OnPacketReceived helper void DoPacketReceived(const std::vector<char>& packet, uint32_t id); GURL document_url_; std::string configuration_id_; std::string configuration_name_; BrowserContext* browser_context_; std::unique_ptr<VpnServiceProxy> vpn_service_proxy_; bool bound_; std::unique_ptr<ppapi::VpnProviderSharedBuffer> send_packet_buffer_; std::unique_ptr<ppapi::VpnProviderSharedBuffer> recv_packet_buffer_; base::queue<std::vector<char>> received_packets_; 
base::WeakPtrFactory<PepperVpnProviderMessageFilter> weak_factory_{this}; }; } // namespace content #endif // CONTENT_BROWSER_RENDERER_HOST_PEPPER_PEPPER_VPN_PROVIDER_MESSAGE_FILTER_CHROMEOS_H_
1,785
679
<reponame>Grosskopf/openoffice<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_odk.hxx" #if defined _MSC_VER #pragma warning(push, 1) #endif #include <windows.h> #if defined _MSC_VER #pragma warning(pop) #endif #include <jni.h> extern "C" BOOL __stdcall _DllMainCRTStartup(HINSTANCE, DWORD, LPVOID) { return TRUE; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenClassesRoot( JNIEnv *env, jclass, jlongArray hkresult) { jboolean ret = JNI_FALSE; PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx(HKEY_CLASSES_ROOT, NULL, 0, KEY_READ, phkey) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenCurrentConfig( JNIEnv *env, jclass, jlongArray hkresult) { jboolean ret = JNI_FALSE; PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx(HKEY_CURRENT_CONFIG, NULL, 0, KEY_READ, phkey) == 
ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenCurrentUser( JNIEnv *env, jclass, jlongArray hkresult) { jboolean ret = JNI_FALSE; PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx(HKEY_CURRENT_USER, NULL, 0, KEY_READ, phkey) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenLocalMachine( JNIEnv *env, jclass, jlongArray hkresult) { jboolean ret = JNI_FALSE; PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, NULL, 0, KEY_READ, phkey) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenUsers( JNIEnv *env, jclass, jlongArray hkresult) { jboolean ret = JNI_FALSE; PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx(HKEY_USERS, NULL, 0, KEY_READ, phkey) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegOpenKeyEx( JNIEnv *env, jclass, jlong parent, jstring name, jlongArray hkresult) { jboolean ret = JNI_FALSE; const char *namestr = env->GetStringUTFChars(name, 0); PHKEY phkey = (PHKEY)env->GetLongArrayElements(hkresult, 0); if (RegOpenKeyEx((HKEY)parent, namestr, 0, KEY_READ, phkey) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseStringUTFChars(name, namestr); env->ReleaseLongArrayElements(hkresult, (jlong *)phkey, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegCloseKey( JNIEnv *, jclass, jlong hkey) { jboolean ret = JNI_FALSE; if 
(RegCloseKey((HKEY)hkey) == ERROR_SUCCESS) ret = JNI_TRUE; return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegQueryValueEx( JNIEnv *env, jclass, jlong hkey, jstring value, jlongArray type, jbyteArray data, jlongArray size) { jboolean ret = JNI_FALSE; const char* valuestr = env->GetStringUTFChars(value, 0); LPDWORD ptype = (LPDWORD)env->GetLongArrayElements(type, 0); LPBYTE pdata = (LPBYTE)env->GetByteArrayElements(data, 0); LPDWORD psize = (LPDWORD)env->GetLongArrayElements(size, 0); if (RegQueryValueEx((HKEY)hkey, valuestr, NULL, ptype, pdata, psize) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseStringUTFChars(value, valuestr); env->ReleaseLongArrayElements(type, (jlong *)ptype, 0); env->ReleaseByteArrayElements(data, (jbyte *)pdata, 0); env->ReleaseLongArrayElements(size, (jlong *)psize, 0); return ret; } extern "C" JNIEXPORT jboolean JNICALL Java_com_sun_star_lib_loader_WinRegKey_winreg_1RegQueryInfoKey( JNIEnv *env, jclass, jlong hkey, jlongArray subkeys, jlongArray maxSubkeyLen, jlongArray values, jlongArray maxValueNameLen, jlongArray maxValueLen, jlongArray secDescriptor) { jboolean ret = JNI_FALSE; LPDWORD psubkeys = (LPDWORD)env->GetLongArrayElements(subkeys, 0); LPDWORD pmaxSubkeyLen = (LPDWORD)env->GetLongArrayElements(maxSubkeyLen, 0); LPDWORD pvalues = (LPDWORD)env->GetLongArrayElements(values, 0); LPDWORD pmaxValueNameLen = (LPDWORD)env->GetLongArrayElements(maxValueNameLen, 0); LPDWORD pmaxValueLen = (LPDWORD)env->GetLongArrayElements(maxValueLen, 0); LPDWORD psecDescriptor = (LPDWORD)env->GetLongArrayElements(secDescriptor, 0); FILETIME ft; if (RegQueryInfoKey((HKEY)hkey, NULL, NULL, NULL, psubkeys, pmaxSubkeyLen, NULL, pvalues, pmaxValueNameLen, pmaxValueLen, psecDescriptor, &ft) == ERROR_SUCCESS) ret = JNI_TRUE; env->ReleaseLongArrayElements(subkeys, (jlong*)psubkeys, 0); env->ReleaseLongArrayElements(maxSubkeyLen, (jlong*)pmaxSubkeyLen, 0); env->ReleaseLongArrayElements(values, (jlong*)pvalues, 
0); env->ReleaseLongArrayElements(maxValueNameLen, (jlong*)pmaxValueNameLen, 0); env->ReleaseLongArrayElements(maxValueLen, (jlong*)pmaxValueLen, 0); env->ReleaseLongArrayElements(secDescriptor, (jlong*)psecDescriptor, 0); return ret; }
2,865
575
// Copyright 2019 Proyectos y Sistemas de Mantenimiento SL (eProsima). // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @file Subscriber.hpp */ #ifndef _FASTDDS_SUBSCRIBER_HPP_ #define _FASTDDS_SUBSCRIBER_HPP_ #include <fastdds/dds/core/Entity.hpp> #include <fastdds/dds/subscriber/DataReaderListener.hpp> #include <fastdds/dds/subscriber/InstanceState.hpp> #include <fastdds/dds/subscriber/qos/DataReaderQos.hpp> #include <fastdds/dds/subscriber/qos/SubscriberQos.hpp> #include <fastdds/dds/subscriber/SampleState.hpp> #include <fastdds/dds/subscriber/ViewState.hpp> #include <fastdds/dds/topic/qos/TopicQos.hpp> #include <fastdds/dds/topic/TypeSupport.hpp> #include <fastrtps/types/TypesBase.h> using eprosima::fastrtps::types::ReturnCode_t; namespace dds { namespace sub { class Subscriber; } // namespace sub } // namespace dds namespace eprosima { namespace fastrtps { class TopicAttributes; } // namespace fastrtps namespace fastdds { namespace dds { class DomainParticipant; class SubscriberListener; class SubscriberImpl; class DataReader; class DataReaderListener; class DataReaderQos; class TopicDescription; /** * Class Subscriber, contains the public API that allows the user to control the reception of messages. * This class should not be instantiated directly. * DomainRTPSParticipant class should be used to correctly create this element. 
* * @ingroup FASTDDS_MODULE */ class Subscriber : public DomainEntity { protected: friend class SubscriberImpl; friend class DomainParticipantImpl; /** * Create a subscriber, assigning its pointer to the associated implementation. * Don't use directly, create Subscriber using create_subscriber from DomainParticipant. */ Subscriber( SubscriberImpl* pimpl, const StatusMask& mask = StatusMask::all()); Subscriber( DomainParticipant* dp, const SubscriberQos& qos = SUBSCRIBER_QOS_DEFAULT, SubscriberListener* listener = nullptr, const StatusMask& mask = StatusMask::all()); public: /** * @brief Destructor */ virtual ~Subscriber() { } /** * @brief This operation enables the Subscriber * * @return RETCODE_OK is successfully enabled. RETCODE_PRECONDITION_NOT_MET if the participant creating this * Subscriber is not enabled. */ RTPS_DllAPI ReturnCode_t enable() override; /** * Allows accessing the Subscriber Qos. * * @return SubscriberQos reference */ RTPS_DllAPI const SubscriberQos& get_qos() const; /** * Retrieves the Subscriber Qos. * * @param qos SubscriberQos where the qos is returned * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t get_qos( SubscriberQos& qos) const; /** * Allows modifying the Subscriber Qos. * The given Qos must be supported by the SubscriberQos. * * @param qos new value for SubscriberQos * @return RETCODE_IMMUTABLE_POLICY if any of the Qos cannot be changed, RETCODE_INCONSISTENT_POLICY if the Qos is not * self consistent and RETCODE_OK if the qos is changed correctly. */ RTPS_DllAPI ReturnCode_t set_qos( const SubscriberQos& qos); /** * Retrieves the attached SubscriberListener. * * @return Pointer to the SubscriberListener */ RTPS_DllAPI const SubscriberListener* get_listener() const; /** * Modifies the SubscriberListener, sets the mask to StatusMask::all() * * @param listener new value for SubscriberListener * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t set_listener( SubscriberListener* listener); /** * Modifies the SubscriberListener. 
* * @param listener new value for the SubscriberListener * @param mask StatusMask that holds statuses the listener responds to. * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t set_listener( SubscriberListener* listener, const StatusMask& mask); /** * This operation creates a DataReader. The returned DataReader will be attached and belong to the Subscriber. * * @param topic Topic the DataReader will be listening. * @param reader_qos QoS of the DataReader. * @param listener Pointer to the listener (default: nullptr) * @param mask StatusMask that holds statuses the listener responds to (default: all). * @return Pointer to the created DataReader. nullptr if failed. */ RTPS_DllAPI DataReader* create_datareader( TopicDescription* topic, const DataReaderQos& reader_qos, DataReaderListener* listener = nullptr, const StatusMask& mask = StatusMask::all()); /** * This operation creates a DataReader. The returned DataReader will be attached and belongs to the Subscriber. * * @param topic Topic the DataReader will be listening. * @param profile_name DataReader profile name. * @param listener Pointer to the listener (default: nullptr) * @param mask StatusMask that holds statuses the listener responds to (default: all). * @return Pointer to the created DataReader. nullptr if failed. */ RTPS_DllAPI DataReader* create_datareader_with_profile( TopicDescription* topic, const std::string& profile_name, DataReaderListener* listener = nullptr, const StatusMask& mask = StatusMask::all()); /** * This operation deletes a DataReader that belongs to the Subscriber. * * The delete_datareader operation must be called on the same Subscriber object used to create the DataReader. * If delete_datareader is called on a different Subscriber, the operation will have no effect and it will * return an error. 
* * @param reader DataReader to delete * @return RETCODE_PRECONDITION_NOT_MET if the datareader does not belong to this subscriber, RETCODE_OK if it is correctly * deleted and RETCODE_ERROR otherwise. */ RTPS_DllAPI ReturnCode_t delete_datareader( const DataReader* reader); /** * This operation retrieves a previously-created DataReader belonging to the Subscriber that is attached to a * Topic with a matching topic_name. If no such DataReader exists, the operation will return nullptr. * * If multiple DataReaders attached to the Subscriber satisfy this condition, then the operation will return * one of them. It is not specified which one. * * @param topic_name Name of the topic associated to the DataReader * @return Pointer to a previously created DataReader created on a Topic with that topic_name */ RTPS_DllAPI DataReader* lookup_datareader( const std::string& topic_name) const; /** * This operation allows the application to access the DataReader objects. * * @param readers Vector of DataReader where the list of existing readers is returned * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t get_datareaders( std::vector<DataReader*>& readers) const; /** * @brief This operation allows the application to access the DataReader objects that contain samples with the * specified sample_states, view_states, and instance_states. 
* * @param[out] readers Vector of DataReader where the list of existing readers is returned * @param sample_states Vector of SampleStateKind * @param view_states Vector of ViewStateKind * @param instance_states Vector of InstanceStateKind * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t get_datareaders( std::vector<DataReader*>& readers, const std::vector<SampleStateKind>& sample_states, const std::vector<ViewStateKind>& view_states, const std::vector<InstanceStateKind>& instance_states) const; /** * This operation checks if the subscriber has DataReaders * * @return true if the subscriber has one or several DataReaders, false in other case */ RTPS_DllAPI bool has_datareaders() const; /** * @brief Indicates that the application is about to access the data samples in any of the DataReader objects * attached to the Subscriber. * * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t begin_access(); /** * @brief Indicates that the application has finished accessing the data samples in DataReader objects managed by * the Subscriber. * * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t end_access(); /** * This operation invokes the operation on_data_available on the DataReaderListener objects attached to * contained DataReader entities. * * This operation is typically invoked from the on_data_on_readers operation in the SubscriberListener. * That way the SubscriberListener can delegate to the DataReaderListener objects the handling of the data. * * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t notify_datareaders() const; /** * @brief Deletes all contained DataReaders. If the DataReaders have any QueryCondition or ReadCondition, they are * deleted before the DataReader itself. 
* * @return RETCODE_OK if successful, an error code otherwise */ RTPS_DllAPI ReturnCode_t delete_contained_entities(); /** * This operation sets a default value of the DataReader QoS policies which will be used for newly created * DataReader entities in the case where the QoS policies are defaulted in the create_datareader operation. * * This operation will check that the resulting policies are self consistent; if they are not, the operation * will have no effect and return false. * * The special value DATAREADER_QOS_DEFAULT may be passed to this operation to indicate that the default QoS * should be reset back to the initial values the factory would use, that is the values that would be used * if the set_default_datareader_qos operation had never been called. * * @param qos new value for DataReaderQos to set as default * @return RETCODE_INCONSISTENT_POLICY if the Qos is not self consistent and RETCODE_OK if the qos is changed correctly. */ RTPS_DllAPI ReturnCode_t set_default_datareader_qos( const DataReaderQos& qos); /** * This operation returns the default value of the DataReader QoS, that is, the QoS policies which will be * used for newly created DataReader entities in the case where the QoS policies are defaulted in the * create_datareader operation. * * The values retrieved get_default_datareader_qos will match the set of values specified on the last successful * call to get_default_datareader_qos, or else, if the call was never made, the default values. * * @return Current default DataReaderQos. */ RTPS_DllAPI const DataReaderQos& get_default_datareader_qos() const; /** * This operation returns the default value of the DataReader QoS, that is, the QoS policies which will be * used for newly created DataReader entities in the case where the QoS policies are defaulted in the * create_datareader operation. 
* * The values retrieved get_default_datareader_qos will match the set of values specified on the last successful * call to get_default_datareader_qos, or else, if the call was never made, the default values. * * @return Current default DataReaderQos. */ RTPS_DllAPI DataReaderQos& get_default_datareader_qos(); /** * This operation retrieves the default value of the DataReader QoS, that is, the QoS policies which will be * used for newly created DataReader entities in the case where the QoS policies are defaulted in the * create_datareader operation. * * The values retrieved get_default_datareader_qos will match the set of values specified on the last successful * call to get_default_datareader_qos, or else, if the call was never made, the default values. * * @param qos DataReaderQos where the default_qos is returned * @return RETCODE_OK */ RTPS_DllAPI ReturnCode_t get_default_datareader_qos( DataReaderQos& qos) const; /** * Fills the DataReaderQos with the values of the XML profile. * * @param profile_name DataReader profile name. * @param qos DataReaderQos object where the qos is returned. * @return RETCODE_OK if the profile exists. RETCODE_BAD_PARAMETER otherwise. */ RTPS_DllAPI ReturnCode_t get_datareader_qos_from_profile( const std::string& profile_name, DataReaderQos& qos) const; /** * @brief Copies TopicQos into the corresponding DataReaderQos * * @param[in, out] reader_qos * @param[in] topic_qos * @return RETCODE_OK if successful, an error code otherwise */ RTPS_DllAPI static ReturnCode_t copy_from_topic_qos( DataReaderQos& reader_qos, const TopicQos& topic_qos); /** * This operation returns the DomainParticipant to which the Subscriber belongs. * * @return DomainParticipant Pointer */ RTPS_DllAPI const DomainParticipant* get_participant() const; /** * Returns the Subscriber's handle. * * @return InstanceHandle of this Subscriber. 
*/ RTPS_DllAPI const InstanceHandle_t& get_instance_handle() const; protected: SubscriberImpl* impl_; friend class ::dds::sub::Subscriber; }; } /* namespace dds */ } /* namespace fastdds */ } /* namespace eprosima */ #endif /* _FASTDDS_SUBSCRIBER_HPP_ */
5,023
4,409
/* * Copyright 2008 Web Cohesion * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.oauth.provider.filter; import org.apache.commons.codec.DecoderException; import static org.springframework.security.oauth.common.OAuthCodec.oauthDecode; import static org.springframework.security.oauth.common.OAuthCodec.oauthEncode; import org.springframework.security.oauth.common.OAuthConsumerParameter; import org.springframework.security.oauth.common.StringSplitUtils; import org.springframework.security.oauth.provider.OAuthProviderSupport; import javax.servlet.http.HttpServletRequest; import java.util.*; import java.net.URL; import java.net.MalformedURLException; /** * Utility for common logic for supporting an OAuth provider. * * <p> * @deprecated The OAuth 1.0 Protocol <a href="https://tools.ietf.org/html/rfc5849">RFC 5849</a> is obsoleted by the OAuth 2.0 Authorization Framework <a href="https://tools.ietf.org/html/rfc6749">RFC 6749</a>. * * @author <NAME> */ @Deprecated public class CoreOAuthProviderSupport implements OAuthProviderSupport { private final Set<String> supportedOAuthParameters; private String baseUrl = null; public CoreOAuthProviderSupport() { Set<String> supportedOAuthParameters = new TreeSet<String>(); for (OAuthConsumerParameter supportedParameter : OAuthConsumerParameter.values()) { supportedOAuthParameters.add(supportedParameter.toString()); } this.supportedOAuthParameters = supportedOAuthParameters; } // Inherited. 
public Map<String, String> parseParameters(HttpServletRequest request) { Map<String, String> parameters = parseHeaderParameters(request); if (parameters == null) { //if there is no header authorization parameters, then the oauth parameters are the supported OAuth request parameters. parameters = new HashMap<String, String>(); for (String supportedOAuthParameter : getSupportedOAuthParameters()) { String param = request.getParameter(supportedOAuthParameter); if (param != null) { parameters.put(supportedOAuthParameter, param); } } } return parameters; } /** * Parse the OAuth header parameters. The parameters will be oauth-decoded. * * @param request The request. * @return The parsed parameters, or null if no OAuth authorization header was supplied. */ protected Map<String, String> parseHeaderParameters(HttpServletRequest request) { String header = null; Enumeration<String> headers = request.getHeaders("Authorization"); while (headers.hasMoreElements()) { String value = headers.nextElement(); if ((value.toLowerCase().startsWith("oauth "))) { header = value; break; } } Map<String, String> parameters = null; if (header != null) { parameters = new HashMap<String, String>(); String authHeaderValue = header.substring(6); //create a map of the authorization header values per OAuth Core 1.0, section 5.4.1 String[] headerEntries = StringSplitUtils.splitIgnoringQuotes(authHeaderValue, ','); for (Object o : StringSplitUtils.splitEachArrayElementAndCreateMap(headerEntries, "=", "\"").entrySet()) { Map.Entry entry = (Map.Entry) o; try { String key = oauthDecode((String) entry.getKey()); String value = oauthDecode((String) entry.getValue()); parameters.put(key, value); } catch (DecoderException e) { throw new IllegalStateException(e); } } } return parameters; } /** * Get the supported OAuth parameters. The default implementation supports only the OAuth core parameters. * * @return The OAuth core parameters. 
*/ protected Set<String> getSupportedOAuthParameters() { return this.supportedOAuthParameters; } // Inherited. public String getSignatureBaseString(HttpServletRequest request) { SortedMap<String, SortedSet<String>> significantParameters = loadSignificantParametersForSignatureBaseString(request); //now concatenate them into a single query string according to the spec. StringBuilder queryString = new StringBuilder(); Iterator<Map.Entry<String, SortedSet<String>>> paramIt = significantParameters.entrySet().iterator(); while (paramIt.hasNext()) { Map.Entry<String, SortedSet<String>> sortedParameter = paramIt.next(); Iterator<String> valueIt = sortedParameter.getValue().iterator(); while (valueIt.hasNext()) { String parameterValue = valueIt.next(); queryString.append(sortedParameter.getKey()).append('=').append(parameterValue); if (paramIt.hasNext() || valueIt.hasNext()) { queryString.append('&'); } } } String url = getBaseUrl(request); if (url == null) { //if no URL is configured, then we'll attempt to reconstruct the URL. This may be inaccurate. url = request.getRequestURL().toString(); } url = normalizeUrl(url); url = oauthEncode(url); String method = request.getMethod().toUpperCase(); return new StringBuilder(method).append('&').append(url).append('&').append(oauthEncode(queryString.toString())).toString(); } /** * Normalize the URL for use in the signature. The OAuth spec says the URL protocol and host are to be lower-case, * and the query and fragments are to be stripped. * * @param url The URL. * @return The URL normalized for use in the signature. 
*/ protected String normalizeUrl(String url) { try { URL requestURL = new URL(url); StringBuilder normalized = new StringBuilder(requestURL.getProtocol().toLowerCase()).append("://").append(requestURL.getHost().toLowerCase()); if ((requestURL.getPort() >= 0) && (requestURL.getPort() != requestURL.getDefaultPort())) { normalized.append(":").append(requestURL.getPort()); } normalized.append(requestURL.getPath()); return normalized.toString(); } catch (MalformedURLException e) { throw new IllegalStateException("Illegal URL for calculating the OAuth signature.", e); } } /** * Loads the significant parameters (name-to-value map) that are to be used to calculate the signature base string. * The parameters will be encoded, per the spec section 9.1. * * @param request The request. * @return The significan parameters. */ protected SortedMap<String, SortedSet<String>> loadSignificantParametersForSignatureBaseString(HttpServletRequest request) { //first collect the relevant parameters... SortedMap<String, SortedSet<String>> significantParameters = new TreeMap<String, SortedSet<String>>(); //first pull from the request... Enumeration parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String parameterName = (String) parameterNames.nextElement(); String[] values = request.getParameterValues(parameterName); if (values == null) { values = new String[]{ "" }; } parameterName = oauthEncode(parameterName); for (String parameterValue : values) { if (parameterValue == null) { parameterValue = ""; } parameterValue = oauthEncode(parameterValue); SortedSet<String> significantValues = significantParameters.get(parameterName); if (significantValues == null) { significantValues = new TreeSet<String>(); significantParameters.put(parameterName, significantValues); } significantValues.add(parameterValue); } } //then take into account the header parameter values... 
Map<String, String> oauthParams = parseParameters(request); oauthParams.remove("realm"); //remove the realm Set<String> parsedParams = oauthParams.keySet(); for (String parameterName : parsedParams) { String parameterValue = oauthParams.get(parameterName); if (parameterValue == null) { parameterValue = ""; } parameterName = oauthEncode(parameterName); parameterValue = oauthEncode(parameterValue); SortedSet<String> significantValues = significantParameters.get(parameterName); if (significantValues == null) { significantValues = new TreeSet<String>(); significantParameters.put(parameterName, significantValues); } significantValues.add(parameterValue); } //remove the oauth signature parameter value. significantParameters.remove(OAuthConsumerParameter.oauth_signature.toString()); return significantParameters; } /** * The configured base URL for this OAuth provider for the given HttpServletRequest. Default implementation return getBaseUrl() + request URI. * * @param request The HttpServletRequest currently processed * @return The configured base URL for this OAuth provider with respect to the supplied HttpServletRequest. */ protected String getBaseUrl(HttpServletRequest request) { String baseUrl = getBaseUrl(); if (baseUrl != null) { StringBuilder builder = new StringBuilder(baseUrl); String path = request.getRequestURI(); if (path != null && !"".equals(path)) { if (!baseUrl.endsWith("/") && !path.startsWith("/")) { builder.append('/'); } builder.append(path); } baseUrl = builder.toString(); } return baseUrl; } /** * The configured base URL for this OAuth provider. * * @return The configured base URL for this OAuth provider. */ public String getBaseUrl() { return baseUrl; } /** * The configured base URL for the OAuth provider. * * @param baseUrl The configured base URL for the OAuth provider. */ public void setBaseUrl(String baseUrl) { this.baseUrl = baseUrl; } }
3,390
1,568
<gh_stars>1000+ # -*- coding: utf-8 -*- def boot(application, config): if config and config['origins']: try: from flask.ext.cors import CORS for i in config.keys(): application.config['CORS_%s' % i.upper()] = config[i] CORS(application) except Exception as e: raise Exception('Failed to init cors support %s' % e)
190
967
<gh_stars>100-1000 // // ECOMLeaksMessenger.h // EchoSDK // // Created by 陈爱彬 on 2020/1/7. Maintain by 陈爱彬 // Description // #import <Foundation/Foundation.h> NS_ASSUME_NONNULL_BEGIN @interface ECOMLeaksMessenger : NSObject @end NS_ASSUME_NONNULL_END
121
1,350
<reponame>Shashi-rk/azure-sdk-for-java // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.ai.formrecognizer.administration.models; import com.azure.core.annotation.Fluent; /** * Options that may be passed when copying models into the target Form Recognizer resource. */ @Fluent public final class CopyAuthorizationOptions { private String description; /** * Get the model description. * * @return the model description value. */ public String getDescription() { return description; } /** * Set the model description. * * @param description the model description value to set. * @return the CopyAuthorizationOptions object itself. */ public CopyAuthorizationOptions setDescription(String description) { this.description = description; return this; } }
296
852
#ifndef RecoLocalTracker_ESProducers_SiTrackerMultiRecHitUpdatorESProducer_h #define RecoLocalTracker_ESProducers_SiTrackerMultiRecHitUpdatorESProducer_h #include "FWCore/Framework/interface/ESProducer.h" #include "FWCore/ParameterSet/interface/ParameterSet.h" #include "RecoTracker/Record/interface/MultiRecHitRecord.h" #include "RecoTracker/SiTrackerMRHTools/interface/SiTrackerMultiRecHitUpdator.h" #include <memory> class SiTrackerMultiRecHitUpdatorESProducer : public edm::ESProducer { public: SiTrackerMultiRecHitUpdatorESProducer(const edm::ParameterSet &p); ~SiTrackerMultiRecHitUpdatorESProducer() override; std::unique_ptr<SiTrackerMultiRecHitUpdator> produce(const MultiRecHitRecord &); private: edm::ParameterSet pset_; }; #endif // RecoLocalTracker_ESProducers_SiTrackerMultiRecHitUpdatorESProducer_h
278
30,023
<filename>homeassistant/components/hyperion/translations/lb.json { "config": { "abort": { "already_configured": "Service ass scho konfigur\u00e9iert", "already_in_progress": "Konfiguratioun's Oflaf ass schon am gaang", "auth_new_token_not_granted_error": "Nei erstallte Jeton ass net an der Hyperion UI accord\u00e9iert", "auth_new_token_not_work_error": "Feeler bei der Authentifikatioun mam nei erstallte Jeton", "auth_required_error": "Feeler beim best\u00ebmmen ob Autorisatioun erfuerderlech ass", "cannot_connect": "Feeler beim verbannen", "no_id": "D\u00ebs Hyperion Ambilight Instanz huet seng ID net gemellt." }, "error": { "cannot_connect": "Feeler beim verbannen", "invalid_access_token": "<PASSWORD> <PASSWORD>" }, "step": { "auth": { "data": { "create_token": "<PASSWORD>", "token": "<PASSWORD>" }, "description": "Autorisatioun mat dengem Hyperion Ambilight Server konfigur\u00e9ieren" }, "confirm": { "description": "Soll d\u00ebsen Hyperion Ambilight am Home Assistant dob\u00e4i gesaat ginn?\n\n**Host:** {host}\n**Port:** {port}\n**ID**: {id}", "title": "Dob\u00e4isetzen vum Hyperion Ambilight Service best\u00e4tegen" }, "create_token": { "description": "Klick **Ofsch\u00e9cken** fir een neien Authentifikatioun's Jeton unzefroen. Du g\u00ebss dann an Hyperion UI weidergeleet fir d'Ufro z'accord\u00e9ieren. Iwwerpr\u00e9if dass d\u00e9i ugewisen id \"{auth_id}\" ass.", "title": "Neien Authentifikatioun's Jeton automatesch erstellen" }, "create_token_external": { "title": "Neie Jeton an der Hyperion UI accord\u00e9ieren" }, "user": { "data": { "host": "Host", "port": "Port" } } } }, "options": { "step": { "init": { "data": { "priority": "Hyperion Priorit\u00e9it fir Faarwen an Effekter" } } } } }
1,284
459
/* * This file is part of choco-solver, http://choco-solver.org/ * * Copyright (c) 2021, IMT Atlantique. All rights reserved. * * Licensed under the BSD 4-clause license. * * See LICENSE file in the project root for full license information. */ package org.chocosolver.solver.constraints.nary.nvalue; import org.chocosolver.solver.constraints.Propagator; import org.chocosolver.solver.exception.ContradictionException; import org.chocosolver.solver.learn.ExplanationForSignedClause; import org.chocosolver.solver.variables.IntVar; import org.chocosolver.util.ESat; import org.chocosolver.util.objects.setDataStructures.iterable.IntIterableRangeSet; import java.util.stream.IntStream; import static org.chocosolver.solver.constraints.PropagatorPriority.QUADRATIC; import static org.chocosolver.util.tools.ArrayUtils.concat; /** * Propagator for the atMostNValues constraint * The number of distinct values in the set of variables vars is at most equal to nValues * No level of consistency but better than BC in general (for enumerated domains with holes) * * @author <NAME> */ public class PropAtLeastNValues extends Propagator<IntVar> { //*********************************************************************************** // VARIABLES //*********************************************************************************** private int[] concernedValues; private int n; private int[] mate; //*********************************************************************************** // CONSTRUCTORS //*********************************************************************************** /** * Propagator for the NValues constraint * The number of distinct values among concerned values in the set of variables vars is exactly equal to nValues * No level of consistency for the filtering * * @param variables array of integer variables * @param concernedValues will be sorted! 
* @param nValues integer variable */ public PropAtLeastNValues(IntVar[] variables, int[] concernedValues, IntVar nValues) { super(concat(variables, nValues), QUADRATIC, false); n = variables.length; this.concernedValues = concernedValues; mate = new int[concernedValues.length]; } //*********************************************************************************** // PROPAGATION //*********************************************************************************** @Override public void propagate(int evtmask) throws ContradictionException { vars[n].updateUpperBound(n, this); int count = 0; int countMax = 0; for (int i = concernedValues.length - 1; i >= 0; i--) { boolean possible = false; boolean mandatory = false; mate[i] = -1; int value = concernedValues[i]; for (int v = 0; v < n; v++) { if (vars[v].contains(value)) { possible = true; if (vars[v].isInstantiated()) { mandatory = true; mate[i] = -2; break; } else { if (mate[i] == -1) { mate[i] = v; } else { mate[i] = -2; } } } } if (possible) { countMax++; } if (mandatory) { count++; } } // filtering cardinality variable vars[n].updateUpperBound(countMax, this); // filtering decision variables boolean again = false; if (count < countMax && countMax == vars[n].getLB()) { for (int i = concernedValues.length - 1; i >= 0; i--) { if (mate[i] >= 0) { if (vars[mate[i]].instantiateTo(concernedValues[i], this)) { again = true; } } } if (!again) { int nbInst = 0; for (int i = 0; i < n; i++) { if (vars[i].isInstantiated()) { nbInst++; } } // remove used variables when alldiff is required over uninstantiated variables if (n - nbInst == countMax - count) { for (int i = concernedValues.length - 1; i >= 0; i--) { boolean mandatory = false; int value = concernedValues[i]; for (int v = 0; v < n; v++) { if (vars[v].isInstantiatedTo(value)) { mandatory = true; break; } } if (mandatory) { for (int v = 0; v < n; v++) { if (!vars[v].isInstantiated()) { if (vars[v].removeValue(value, this)) { again = true; } } } } } } } } if (count >= 
vars[n].getUB()) { setPassive(); } else if (again) { propagate(0);// fix point is required as not all possible values add a mate } } //*********************************************************************************** // INFO //*********************************************************************************** @Override public ESat isEntailed() { int countMin = 0; int countMax = 0; for (int i = concernedValues.length - 1; i >= 0; i--) { boolean possible = false; boolean mandatory = false; for (int v = 0; v < n; v++) { if (vars[v].contains(concernedValues[i])) { possible = true; if (vars[v].isInstantiated()) { mandatory = true; break; } } } if (possible) { countMax++; } if (mandatory) { countMin++; } } if (countMin >= vars[n].getUB()) { return ESat.TRUE; } if (countMax < vars[n].getLB()) { return ESat.FALSE; } return ESat.UNDEFINED; } /** * Find in the implication graph and add to the explanation all the remove value events (the real events added are inverted because only disjunctions are allowed for explanation) */ private void explainDiffForalliForallt(ExplanationForSignedClause e, int[] indexes) { for (int i : indexes) { for(int t : e.root(vars[i])){ if (!e.domain(vars[i]).contains(t)) { vars[i].unionLit(t,e); } }//vars[i].unionLit(e.complement(vars[i]),e); } } /** * Find in the implication graph and add to the explanation all the remove value events except those on value t (the real events added are inverted because only disjunctions are allowed for explanation) * @param t exception value */ private void explainDiffForalliForalltDifft(ExplanationForSignedClause e, int[] indexes, int t) { for (int i : indexes) { for(int tt : e.root(vars[i])){ if (!e.domain(vars[i]).contains(tt)&&t!=tt) { vars[i].unionLit(tt,e); } }//vars[i].unionLit(e.complement(vars[i]),e); } } /** * Find in the implication graph and add in the explanation remove value t events (the real events added are inverted because only disjunctions are allowed for explanation) * @param t value */ private 
void explainDiffForallit(ExplanationForSignedClause e, int[] indexes, int t) { for (int i : indexes) { if (!e.domain(vars[i]).contains(t)) { vars[i].unionLit(t,e); } } } /** * Find in the implication graph and add to the explanation all the instantiate events except those on value t (the real events added are inverted because only disjunctions are allowed for explanation) * @param t exception value */ private void explainEquaForalliForalltDifft(ExplanationForSignedClause e, int[] indexes, int t) { for (int i : indexes) { for(int tt : e.root(vars[i])){ if (e.domain(vars[i]).contains(tt)&&t!=tt) { vars[i].intersectLit(e.setDiffVal(tt),e); } } } } /** * Find in the implication graph and add in the explanation all instantiation events (the real events added are inverted because only disjunctions are allowed for explanation) */ private void explainEquaForalliForallt(ExplanationForSignedClause e, int[] indexes) { for (int i : indexes) { for (int t : e.root(vars[i])) { if (e.domain(vars[i]).contains(t)) { vars[i].intersectLit(e.setDiffVal(t), e); } } } } /** * Detect and explain the event at pivot variable p * @param p pivot variable */ @Override public void explain(int p, ExplanationForSignedClause e) { IntVar pivot = e.readVar(p); int[] X = IntStream.rangeClosed(0, vars.length - 2).filter(i->vars[i]!=pivot).toArray(); switch (e.readMask(p)) { case 4://DECUPP explainDiffForalliForallt(e, X); pivot.intersectLit(IntIterableRangeSet.MIN, e.domain(pivot).max(), e); break; case 8://INSTANTIATE assert e.readDom(p).size()==1; int t = e.readDom(p).min(); explainDiffForallit(e, X, t); explainDiffForalliForalltDifft(e, X, t); explainEquaForalliForalltDifft(e, X, t); vars[vars.length - 1].unionLit(e.complement(vars[vars.length - 1]),e); IntIterableRangeSet set = e.complement(pivot); set.add(t); pivot.intersectLit(set, e); break; case 2://INCLOW case 1://REMOVE case 0://VOID case 6://BOUND inclow+decup default: throw new UnsupportedOperationException("Unknown event type 
explanation"); } } }
5,245
416
<reponame>khauser/SimpleFlatMapper<gh_stars>100-1000 package org.simpleflatmapper.datastax.test.beans; import com.datastax.driver.core.TupleValue; public class DbObjectsWithTupleValue { private long id; private TupleValue t; public long getId() { return id; } public void setId(long id) { this.id = id; } public TupleValue getT() { return t; } public void setT(TupleValue t) { this.t = t; } }
210
1,178
/* * Copyright 2020 Makani Technologies LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef SIM_MODELS_SENSORS_GSG_H_ #define SIM_MODELS_SENSORS_GSG_H_ #include <vector> #include "common/c_math/vec3.h" #include "common/macros.h" #include "control/system_types.h" #include "sim/faults/faults.h" #include "sim/models/perch.h" #include "sim/models/rigid_bodies/wing.h" #include "sim/models/sensors/sensor.h" #include "sim/models/signals/measurement.h" #include "sim/models/tether.h" #include "sim/sim_messages.h" // TODO: Consider renaming this class to // GroundStationEncoders or something similar. 
class Gsg : public Sensor { friend class GsgTest; public: Gsg(const Perch *perch, const Tether &tether, const Wing &wing, const GsgParams &gsg_params, const GsgSimParams &gsg_sim_params, const PerchParams &perch_params, const PerchSimParams &perch_sim_params, const LevelwindParams &levelwind_params, FaultSchedule *faults); ~Gsg() {} void UpdateSensorOutputs(SimSensorMessage * /*sensor_message*/, TetherUpMessage *tether_up) const override; void Publish() const override; private: void DiscreteStepHelper(double t) override; void UpdateTetherDrum(DrumLabel label, TetherDrum *drum) const; void UpdateTetherPlatform(PlatformLabel label, TetherPlatform *platform) const; double elevation(int32_t i) const { return elevations_[i].recorded(); } double azimuth(int32_t i) const { return azimuths_[i].recorded(); } double twist(int32_t i) const { return twists_[i].recorded(); } double perch_azimuth(int32_t i) const { return perch_azimuths_[i].recorded(); } double levelwind_elevation(int32_t i) const { return levelwind_elevations_[i].recorded(); } // Parameters. const GsgParams &gsg_params_; const GsgSimParams &gsg_sim_params_; const PerchParams &perch_params_; const LevelwindParams &levelwind_params_; // Connections to other models. const Perch *perch_; const Tether &tether_; const Wing &wing_; // Discrete state. DiscreteState<double> actual_elevation_, actual_azimuth_, actual_twist_; DiscreteState<double> actual_perch_azimuth_, actual_levelwind_elevation_; DiscreteState<Vec3> prev_tether_force_g_; // Sub-models. std::vector<Measurement<double>> elevations_, azimuths_, twists_; std::vector<Measurement<double>> perch_azimuths_, levelwind_elevations_; DISALLOW_COPY_AND_ASSIGN(Gsg); }; #endif // SIM_MODELS_SENSORS_GSG_H_
1,090
376
#pragma once #include <Register/Utility.hpp> namespace Kvasir { //cyclic redundancy check calculation unit namespace CrcDr{ ///<Data register using Addr = Register::Address<0x40023000,0x00000000,0x00000000,unsigned>; ///Data register bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> dr{}; } namespace CrcIdr{ ///<Independent data register using Addr = Register::Address<0x40023004,0xffffff00,0x00000000,unsigned>; ///General-purpose 8-bit data register bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> idr{}; } namespace CrcCr{ ///<Control register using Addr = Register::Address<0x40023008,0xffffff1e,0x00000000,unsigned>; ///reset bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,unsigned> reset{}; ///Reverse input data constexpr Register::FieldLocation<Addr,Register::maskFromRange(6,5),Register::ReadWriteAccess,unsigned> revIn{}; ///Reverse output data constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::ReadWriteAccess,unsigned> revOut{}; } namespace CrcInit{ ///<Initial CRC value using Addr = Register::Address<0x4002300c,0x00000000,0x00000000,unsigned>; ///Programmable initial CRC value constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> init{}; } }
618
652
<filename>plugin/src/test/java/com/stratio/cassandra/lucene/search/SearchBuildersTest.java<gh_stars>100-1000 /* * Copyright (C) 2014 Stratio (http://stratio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.stratio.cassandra.lucene.search; import com.stratio.cassandra.lucene.search.condition.*; import com.stratio.cassandra.lucene.search.condition.builder.*; import com.stratio.cassandra.lucene.search.sort.SimpleSortField; import com.stratio.cassandra.lucene.search.sort.builder.SimpleSortFieldBuilder; import org.junit.Test; import java.io.IOException; import static com.stratio.cassandra.lucene.search.SearchBuilders.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * Class for testing {@link Search} builders. 
* * @author <NAME> {@literal <<EMAIL>>} */ public class SearchBuildersTest { @Test public void testBool() throws IOException { BooleanConditionBuilder builder = bool().must(all()); assertNotNull("Condition builder is not built", builder); BooleanCondition condition = builder.build(); assertNotNull("Condition is not built", condition); } @Test public void testFuzzy() throws IOException { FuzzyConditionBuilder builder = fuzzy("field", "value"); assertNotNull("Condition builder is not built", builder); FuzzyCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is not set", "value", condition.value); } @Test public void testLucene() throws IOException { LuceneConditionBuilder builder = lucene("field:value"); assertNotNull("Condition builder is not built", builder); LuceneCondition condition = builder.build(); assertEquals("Condition query is not set", "field:value", condition.query); } @Test public void testMatch() throws IOException { MatchConditionBuilder builder = match("field", "value"); assertNotNull("Condition builder is not built", builder); MatchCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is not set", "value", condition.value); } @Test public void testMatchAll() throws IOException { AllConditionBuilder builder = all(); assertNotNull("Condition builder is not built", builder); builder.build(); } @Test public void testNone() throws IOException { NoneConditionBuilder builder = none(); assertNotNull("Condition builder is not built", builder); builder.build(); } @Test public void testPhrase() throws IOException { PhraseConditionBuilder builder = phrase("field", "value1 value2").slop(2); assertNotNull("Condition builder is not built", builder); PhraseCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is 
not set", "value1 value2", condition.value); assertEquals("Condition slop is not set", 2, condition.slop); } @Test public void testPrefix() throws IOException { PrefixConditionBuilder builder = prefix("field", "value"); assertNotNull("Condition builder is not built", builder); PrefixCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is not set", "value", condition.value); } @Test public void testRange() throws IOException { RangeConditionBuilder builder = range("field"); assertNotNull("Condition builder is not built", builder); RangeCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); } @Test public void testRegexp() throws IOException { RegexpConditionBuilder builder = regexp("field", "value"); assertNotNull("Condition builder is not built", builder); RegexpCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is not set", "value", condition.value); } @Test public void testWildcard() throws IOException { WildcardConditionBuilder builder = wildcard("field", "value"); assertNotNull("Condition builder is not built", builder); WildcardCondition condition = builder.build(); assertEquals("Condition field is not set", "field", condition.field); assertEquals("Condition value is not set", "value", condition.value); } @Test public void testSortField() throws IOException { SimpleSortFieldBuilder builder = field("field"); assertNotNull("Condition builder is not built", builder); SimpleSortField sortField = builder.build(); assertEquals("Field is not set", "field", sortField.field); } }
1,834
1,909
<filename>spring-batch-core/src/test/java/org/springframework/batch/core/launch/support/TestJobParametersIncrementer.java /* * Copyright 2009-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.batch.core.launch.support; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.JobParametersIncrementer; import org.springframework.lang.Nullable; public class TestJobParametersIncrementer implements JobParametersIncrementer { @Override public JobParameters getNext(@Nullable JobParameters parameters) { return new JobParametersBuilder().addString("foo", "spam").toJobParameters(); } }
336
930
package com.dianping.pigeon.remoting.netty.codec; /** * @author qi.yin * 2016/05/11 上午12:12. */ public class CodecConstants { public static final int MEGIC_FIELD_LENGTH = 2; public static final int BODY_FIELD_LENGTH = 4; public static final int HEAD_LENGTH = 3; public static final int SEQ_FIELD_LENGTH = 8; public static final int EXPAND_FIELD_LENGTH = 3; public static final int TAIL_LENGTH = SEQ_FIELD_LENGTH + EXPAND_FIELD_LENGTH; public static final int FRONT_LENGTH = HEAD_LENGTH + BODY_FIELD_LENGTH; public static final int FRAME_LENGTH = HEAD_LENGTH + BODY_FIELD_LENGTH + TAIL_LENGTH; public static final int _MEGIC_FIELD_LENGTH = 2; public static final int _VERSION_FIELD_LENGTH = 1; public static final int _HEAD_LENGTH = 4; public static final int _HEAD_FIELD_LENGTH = 2; public static final int _TOTAL_FIELD_LENGTH = 4; public static final int _TAIL_LENGTH = 4; public static final int _FRONT_COMMAND_LENGTH = _MEGIC_FIELD_LENGTH + _VERSION_FIELD_LENGTH; public static final int _FRONT_LENGTH = _HEAD_LENGTH + _TOTAL_FIELD_LENGTH + _HEAD_FIELD_LENGTH; public static final int _FRONT_LENGTH_ = _HEAD_LENGTH + _TOTAL_FIELD_LENGTH; public static final byte MAGIC_FIRST = 0x39; public static final byte MAGIC_SECEND = 0x3A; public static final byte[] MAGIC = new byte[]{MAGIC_FIRST, MAGIC_SECEND}; public static final byte EXPAND_FIRST = 0x1D; public static final byte EXPAND_SECOND = 0x1E; public static final byte EXPAND_THIRD = 0x1F; public static final byte[] EXPAND = new byte[]{EXPAND_FIRST, EXPAND_SECOND, EXPAND_THIRD}; public static final byte _MAGIC_FIRST = (byte) 0xAB; public static final byte _MAGIC_SECEND = (byte) 0xBA; public static final byte[] _MAGIC = new byte[]{_MAGIC_FIRST, _MAGIC_SECEND}; public static final int ESTIMATED_LENGTH = 512; }
713
806
package net2.lingala.zip4j.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;

import net2.lingala.zip4j.exception.ZipException;
import net2.lingala.zip4j.progress.ProgressMonitor;

/**
 * Utility for computing the CRC32 checksum of a file on disk.
 */
public class CRCUtil {

    /** Read buffer size (16 KB) used while streaming the file. */
    private static final int BUF_SIZE = 16384;

    /**
     * Computes the CRC32 of {@code inputFile} without progress reporting.
     *
     * @param inputFile path of the file to checksum
     * @return the CRC32 value of the file contents
     * @throws ZipException if the path is null/empty, unreadable, or an I/O error occurs
     */
    public static long computeFileCRC(String inputFile) throws ZipException {
        return computeFileCRC(inputFile, null);
    }

    /**
     * Computes the CRC32 of {@code inputFile}, optionally reporting progress.
     *
     * Reconstructed from the JADX instruction dump that previously replaced
     * this method with an {@code UnsupportedOperationException} stub (the
     * decompiled register-level logic was preserved verbatim in a comment);
     * the control flow below mirrors that dump exactly.
     *
     * @param inputFile       path of the file to checksum
     * @param progressMonitor optional monitor; receives per-chunk progress and
     *                        may cancel the computation (returns 0 on cancel,
     *                        as in the original code)
     * @return the CRC32 value, or 0 if the monitor cancelled the task
     * @throws ZipException if the path is null/empty, unreadable, an I/O error
     *                      occurs, or the stream cannot be closed
     */
    public static long computeFileCRC(String inputFile, ProgressMonitor progressMonitor) throws ZipException {
        if (!Zip4jUtil.isStringNotNullAndNotEmpty(inputFile)) {
            throw new ZipException("input file is null or empty, cannot calculate CRC for the file");
        }
        InputStream inputStream = null;
        try {
            Zip4jUtil.checkFileReadAccess(inputFile);
            inputStream = new FileInputStream(new File(inputFile));
            byte[] buff = new byte[BUF_SIZE];
            CRC32 crc32 = new CRC32();
            int readLen;
            while ((readLen = inputStream.read(buff)) != -1) {
                crc32.update(buff, 0, readLen);
                if (progressMonitor != null) {
                    progressMonitor.updateWorkCompleted(readLen);
                    if (progressMonitor.isCancelAllTasks()) {
                        // Numeric values taken from the decompiled dump:
                        // 3 == ProgressMonitor.RESULT_CANCELLED,
                        // 0 == ProgressMonitor.STATE_READY — TODO confirm
                        // against the zip4j ProgressMonitor constants.
                        progressMonitor.setResult(3);
                        progressMonitor.setState(0);
                        return 0;
                    }
                }
            }
            return crc32.getValue();
        } catch (IOException e) {
            throw new ZipException(e);
        } catch (Exception e) {
            throw new ZipException(e);
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException e) {
                    // Same message the decompiled code threw on close failure.
                    throw new ZipException("error while closing the file after calculating crc");
                }
            }
        }
    }
}
2,377
362
// tests/framegraph/UnitTests/UnitTest_VBuffer.cpp
// Copyright (c) 2018-2020, <NAME>. For more information see 'LICENSE'

#ifdef FG_ENABLE_VULKAN

#include "VLocalBuffer.h"
#include "VBarrierManager.h"
#include "framegraph/Public/FrameGraph.h"
#include "UnitTest_Common.h"
#include "DummyTask.h"

namespace FG
{
	// Test-only accessor: reaches into the private state of VBuffer /
	// VLocalBuffer so the test can set up a buffer description without a
	// Vulkan device and inspect the barrier lists CommitBarrier() produces.
	class VBufferUnitTest
	{
	public:
		using Barrier = VLocalBuffer::BufferAccess;

		// Initializes the buffer description directly, bypassing device allocation.
		static bool Create (VBuffer &buf, const BufferDesc &desc)
		{
			buf._desc = desc;
			return true;
		}

		// Expose the tracked read/write access ranges for verification.
		static ArrayView<Barrier> GetReadBarriers (const VLocalBuffer *buf) { return buf->_accessForRead; }
		static ArrayView<Barrier> GetWriteBarriers (const VLocalBuffer *buf) { return buf->_accessForWrite; }
	};

	using BufferState = VLocalBuffer::BufferState;
}	// FG


// Exercises VLocalBuffer barrier tracking on a 1 KiB buffer.  Each "pass"
// adds one pending access for a byte range, commits it, then checks how the
// read/write barrier lists were split and merged.  The expected values below
// encode the intended range-merging behavior — treat them as the spec.
static void VBuffer_Test1 ()
{
	VBarrierManager		barrier_mngr;

	const auto	tasks		= GenDummyTasks( 30 );
	auto		task_iter	= tasks.begin();

	VBuffer				global_buffer;
	VLocalBuffer		local_buffer;
	VLocalBuffer const*	buf = &local_buffer;

	TEST( VBufferUnitTest::Create( global_buffer, BufferDesc{ 1024_b, EBufferUsage::All }));
	TEST( local_buffer.Create( &global_buffer ));

	// pass 1: transfer write to [0,512) -> one write barrier covering the range
	{
		buf->AddPendingState(BufferState{ EResourceState::TransferDst, 0, 512, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	w_barriers = VBufferUnitTest::GetWriteBarriers( buf );
		TEST( w_barriers.size() == 1 );
		TEST( w_barriers.back().range.begin == 0 );
		TEST( w_barriers.back().range.end == 512 );
		TEST( w_barriers.back().stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( w_barriers.back().access == VK_ACCESS_TRANSFER_WRITE_BIT );
		TEST( w_barriers.back().isReadable == false );
		TEST( w_barriers.back().isWritable == true );
	}

	// pass 2: transfer read of [0,64) -> first read barrier appears
	{
		buf->AddPendingState(BufferState{ EResourceState::TransferSrc, 0, 64, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 1 );
		TEST( r_barriers.back().range.begin == 0 );
		TEST( r_barriers.back().range.end == 64 );
		TEST( r_barriers.back().stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( r_barriers.back().access == VK_ACCESS_TRANSFER_READ_BIT );
		TEST( r_barriers.back().isReadable == true );
		TEST( r_barriers.back().isWritable == false );
	}

	// pass 3: vertex-shader uniform read of [64,128) -> appended after pass-2 range
	{
		buf->AddPendingState(BufferState{ EResourceState::UniformRead | EResourceState::_VertexShader, 64, 64+64, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 2 );
		TEST( r_barriers.back().range.begin == 64 );
		TEST( r_barriers.back().range.end == (64+64) );
		TEST( r_barriers.back().stages == VK_PIPELINE_STAGE_VERTEX_SHADER_BIT );
		TEST( r_barriers.back().access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers.back().isReadable == true );
		TEST( r_barriers.back().isWritable == false );
	}

	// pass 4: fragment-shader uniform read of [256,320) -> disjoint third read range
	{
		buf->AddPendingState(BufferState{ EResourceState::UniformRead | EResourceState::_FragmentShader, 256, 256+64, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 3 );
		TEST( r_barriers.back().range.begin == 256 );
		TEST( r_barriers.back().range.end == (256+64) );
		TEST( r_barriers.back().stages == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT );
		TEST( r_barriers.back().access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers.back().isReadable == true );
		TEST( r_barriers.back().isWritable == false );
	}

	// pass 5: compute-shader write to [512,576) -> second write range; reads untouched
	{
		buf->AddPendingState(BufferState{ EResourceState::ShaderWrite | EResourceState::_ComputeShader, 512, 512+64, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		auto	w_barriers = VBufferUnitTest::GetWriteBarriers( buf );
		TEST( r_barriers.size() == 3 );
		TEST( w_barriers.size() == 2 );
		TEST( w_barriers.back().range.begin == 512 );
		TEST( w_barriers.back().range.end == (512+64) );
		TEST( w_barriers.back().stages == VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT );
		TEST( w_barriers.back().access == VK_ACCESS_SHADER_WRITE_BIT );
		TEST( w_barriers.back().isReadable == false );
		TEST( w_barriers.back().isWritable == true );
	}

	// pass 6: vertex-shader read overlapping the tail of pass-4's range ->
	// the fragment-shader range [256,320) is split at 256+32
	{
		buf->AddPendingState(BufferState{ EResourceState::UniformRead | EResourceState::_VertexShader, 256+32, 256+64, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 4 );
		TEST( r_barriers[2].range.begin == 256 );
		TEST( r_barriers[2].range.end == (256+32) );
		TEST( r_barriers[2].stages == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT );
		TEST( r_barriers[2].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[2].isReadable == true );
		TEST( r_barriers[2].isWritable == false );
		TEST( r_barriers[2].index == ExeOrderIndex(4) );

		TEST( r_barriers[3].range.begin == (256+32) );
		TEST( r_barriers[3].range.end == (256+64) );
		TEST( r_barriers[3].stages == VK_PIPELINE_STAGE_VERTEX_SHADER_BIT );
		TEST( r_barriers[3].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[3].isReadable == true );
		TEST( r_barriers[3].isWritable == false );
		TEST( r_barriers[3].index == ExeOrderIndex(6) );
	}

	// pass 7: geometry-shader read [256+16, 256+48) -> splits both neighbors,
	// producing fragment / geometry / vertex sub-ranges
	{
		buf->AddPendingState(BufferState{ EResourceState::UniformRead | EResourceState::_GeometryShader, 256+16, 256+16+32, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 5 );
		TEST( r_barriers[2].range.begin == 256 );
		TEST( r_barriers[2].range.end == (256+16) );
		TEST( r_barriers[2].stages == VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT );
		TEST( r_barriers[2].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[2].isReadable == true );
		TEST( r_barriers[2].isWritable == false );
		TEST( r_barriers[2].index == ExeOrderIndex(4) );

		TEST( r_barriers[3].range.begin == (256+16) );
		TEST( r_barriers[3].range.end == (256+16+32) );
		TEST( r_barriers[3].stages == VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT );
		TEST( r_barriers[3].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[3].isReadable == true );
		TEST( r_barriers[3].isWritable == false );
		TEST( r_barriers[3].index == ExeOrderIndex(7) );

		TEST( r_barriers[4].range.begin == (256+16+32) );
		TEST( r_barriers[4].range.end == (256+64) );
		TEST( r_barriers[4].stages == VK_PIPELINE_STAGE_VERTEX_SHADER_BIT );
		TEST( r_barriers[4].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[4].isReadable == true );
		TEST( r_barriers[4].isWritable == false );
		TEST( r_barriers[4].index == ExeOrderIndex(6) );
	}

	// pass 8: geometry-shader read [16,32) -> splits the transfer-read range
	// [0,64) into three pieces; only the first four entries are checked
	{
		buf->AddPendingState(BufferState{ EResourceState::UniformRead | EResourceState::_GeometryShader, 16, 32, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 7 );
		TEST( r_barriers[0].range.begin == 0 );
		TEST( r_barriers[0].range.end == 16 );
		TEST( r_barriers[0].stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( r_barriers[0].access == VK_ACCESS_TRANSFER_READ_BIT );
		TEST( r_barriers[0].isReadable == true );
		TEST( r_barriers[0].isWritable == false );
		TEST( r_barriers[0].index == ExeOrderIndex(2) );

		TEST( r_barriers[1].range.begin == 16 );
		TEST( r_barriers[1].range.end == 32 );
		TEST( r_barriers[1].stages == VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT );
		TEST( r_barriers[1].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[1].isReadable == true );
		TEST( r_barriers[1].isWritable == false );
		TEST( r_barriers[1].index == ExeOrderIndex(8) );

		TEST( r_barriers[2].range.begin == 32 );
		TEST( r_barriers[2].range.end == 64 );
		TEST( r_barriers[2].stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( r_barriers[2].access == VK_ACCESS_TRANSFER_READ_BIT );
		TEST( r_barriers[2].isReadable == true );
		TEST( r_barriers[2].isWritable == false );
		TEST( r_barriers[2].index == ExeOrderIndex(2) );

		TEST( r_barriers[3].range.begin == 64 );
		TEST( r_barriers[3].range.end == (64+64) );
		TEST( r_barriers[3].stages == VK_PIPELINE_STAGE_VERTEX_SHADER_BIT );
		TEST( r_barriers[3].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[3].isReadable == true );
		TEST( r_barriers[3].isWritable == false );
		TEST( r_barriers[3].index == ExeOrderIndex(3) );
	}

	// pass 9: compute-shader read of the whole front [0, 256+32) -> collapses
	// all earlier read sub-ranges it covers into a single entry
	{
		buf->AddPendingState(BufferState{ EResourceState::ShaderRead | EResourceState::_ComputeShader, 0, 256+32, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		TEST( r_barriers.size() == 3 );
		TEST( r_barriers[0].range.begin == 0 );
		TEST( r_barriers[0].range.end == (256+32) );
		TEST( r_barriers[0].stages == VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT );
		TEST( r_barriers[0].access == VK_ACCESS_SHADER_READ_BIT );
		TEST( r_barriers[0].isReadable == true );
		TEST( r_barriers[0].isWritable == false );
		TEST( r_barriers[0].index == ExeOrderIndex(9) );

		TEST( r_barriers[1].range.begin == (256+32) );
		TEST( r_barriers[1].range.end == (256+16+32) );
		TEST( r_barriers[1].stages == VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT );
		TEST( r_barriers[1].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[1].isReadable == true );
		TEST( r_barriers[1].isWritable == false );
		TEST( r_barriers[1].index == ExeOrderIndex(7) );

		TEST( r_barriers[2].range.begin == (256+16+32) );
		TEST( r_barriers[2].range.end == (256+64) );
		TEST( r_barriers[2].stages == VK_PIPELINE_STAGE_VERTEX_SHADER_BIT );
		TEST( r_barriers[2].access == VK_ACCESS_UNIFORM_READ_BIT );
		TEST( r_barriers[2].isReadable == true );
		TEST( r_barriers[2].isWritable == false );
		TEST( r_barriers[2].index == ExeOrderIndex(6) );
	}

	// pass 10: transfer write to [64,512) -> trims reads down to [0,64) and
	// leaves three write ranges (old transfer, new transfer, compute write)
	{
		buf->AddPendingState(BufferState{ EResourceState::TransferDst, 64, 512, (task_iter++)->get() });
		buf->CommitBarrier( barrier_mngr, null );

		auto	r_barriers = VBufferUnitTest::GetReadBarriers( buf );
		auto	w_barriers = VBufferUnitTest::GetWriteBarriers( buf );
		TEST( r_barriers.size() == 1 );
		TEST( w_barriers.size() == 3 );

		TEST( r_barriers[0].range.begin == 0 );
		TEST( r_barriers[0].range.end == 64 );
		TEST( r_barriers[0].stages == VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT );
		TEST( r_barriers[0].access == VK_ACCESS_SHADER_READ_BIT );
		TEST( r_barriers[0].isReadable == true );
		TEST( r_barriers[0].isWritable == false );
		TEST( r_barriers[0].index == ExeOrderIndex(9) );

		TEST( w_barriers[0].range.begin == 0 );
		TEST( w_barriers[0].range.end == 64 );
		TEST( w_barriers[0].stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( w_barriers[0].access == VK_ACCESS_TRANSFER_WRITE_BIT );
		TEST( w_barriers[0].isReadable == false );
		TEST( w_barriers[0].isWritable == true );
		TEST( w_barriers[0].index == ExeOrderIndex(1) );

		TEST( w_barriers[1].range.begin == 64 );
		TEST( w_barriers[1].range.end == 512 );
		TEST( w_barriers[1].stages == VK_PIPELINE_STAGE_TRANSFER_BIT );
		TEST( w_barriers[1].access == VK_ACCESS_TRANSFER_WRITE_BIT );
		TEST( w_barriers[1].isReadable == false );
		TEST( w_barriers[1].isWritable == true );
		TEST( w_barriers[1].index == ExeOrderIndex(10) );

		TEST( w_barriers[2].range.begin == 512 );
		TEST( w_barriers[2].range.end == (512+64) );
		TEST( w_barriers[2].stages == VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT );
		TEST( w_barriers[2].access == VK_ACCESS_SHADER_WRITE_BIT );
		TEST( w_barriers[2].isReadable == false );
		TEST( w_barriers[2].isWritable == true );
		TEST( w_barriers[2].index == ExeOrderIndex(5) );
	}

	local_buffer.ResetState( ExeOrderIndex::Final, barrier_mngr, null );
	local_buffer.Destroy();
	//global_buffer.Destroy();
}


// Entry point called by the unit-test runner.
extern void UnitTest_VBuffer ()
{
	VBuffer_Test1();
	FG_LOGI( "UnitTest_VBuffer - passed" );
}

#endif	// FG_ENABLE_VULKAN
5,156
332
/**
 * Shared utility classes for the Spring XD DIRT runtime
 * ({@code org.springframework.xd.dirt}).
 */
package org.springframework.xd.dirt.util;
37
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_svtools.hxx" #include <svtools/svtdata.hxx> #include <svtools/svtools.hrc> #include <svtools/indexentryres.hxx> // ------------------------------------------------------------------------- // // wrapper for locale specific translations data of indexentry algorithm // // ------------------------------------------------------------------------- class IndexEntryRessourceData { friend class IndexEntryRessource; private: /* data */ String ma_Name; String ma_Translation; private: /* member functions */ IndexEntryRessourceData () {} public: IndexEntryRessourceData ( const String &r_Algorithm, const String &r_Translation) : ma_Name (r_Algorithm), ma_Translation (r_Translation) {} const String& GetAlgorithm () const { return ma_Name; } const String& GetTranslation () const { return ma_Translation; } ~IndexEntryRessourceData () {} IndexEntryRessourceData& operator= (const IndexEntryRessourceData& r_From) { ma_Name = r_From.GetAlgorithm(); ma_Translation = r_From.GetTranslation(); return 
*this; } }; // ------------------------------------------------------------------------- // // implementation of the indexentry-algorithm-name translation // // ------------------------------------------------------------------------- #define INDEXENTRY_RESSOURCE_COUNT (STR_SVT_INDEXENTRY_END - STR_SVT_INDEXENTRY_START + 1) IndexEntryRessource::IndexEntryRessource() { mp_Data = new IndexEntryRessourceData[INDEXENTRY_RESSOURCE_COUNT]; #define ASCSTR(str) String(RTL_CONSTASCII_USTRINGPARAM(str)) #define RESSTR(rid) String(SvtResId(rid)) mp_Data[STR_SVT_INDEXENTRY_ALPHANUMERIC - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("alphanumeric"), RESSTR(STR_SVT_INDEXENTRY_ALPHANUMERIC)); mp_Data[STR_SVT_INDEXENTRY_DICTIONARY - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("dict"), RESSTR(STR_SVT_INDEXENTRY_DICTIONARY)); mp_Data[STR_SVT_INDEXENTRY_PINYIN - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("pinyin"), RESSTR(STR_SVT_INDEXENTRY_PINYIN)); mp_Data[STR_SVT_INDEXENTRY_PINYIN - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("radical"), RESSTR(STR_SVT_INDEXENTRY_RADICAL)); mp_Data[STR_SVT_INDEXENTRY_STROKE - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("stroke"), RESSTR(STR_SVT_INDEXENTRY_STROKE)); mp_Data[STR_SVT_INDEXENTRY_STROKE - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("zhuyin"), RESSTR(STR_SVT_INDEXENTRY_ZHUYIN)); mp_Data[STR_SVT_INDEXENTRY_ZHUYIN - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("phonetic (alphanumeric first) (grouped by syllable)"), RESSTR(STR_SVT_INDEXENTRY_PHONETIC_FS)); mp_Data[STR_SVT_INDEXENTRY_PHONETIC_FS - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("phonetic (alphanumeric first) (grouped by consonant)"), RESSTR(STR_SVT_INDEXENTRY_PHONETIC_FC)); mp_Data[STR_SVT_INDEXENTRY_PHONETIC_FC - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("phonetic (alphanumeric last) (grouped by syllable)"), 
RESSTR(STR_SVT_INDEXENTRY_PHONETIC_LS)); mp_Data[STR_SVT_INDEXENTRY_PHONETIC_LS - STR_SVT_INDEXENTRY_START] = IndexEntryRessourceData (ASCSTR("phonetic (alphanumeric last) (grouped by consonant)"), RESSTR(STR_SVT_INDEXENTRY_PHONETIC_LC)); } IndexEntryRessource::~IndexEntryRessource() { delete[] mp_Data; } const String& IndexEntryRessource::GetTranslation (const String &r_Algorithm) { xub_StrLen nIndex = r_Algorithm.Search('.'); String aLocaleFreeAlgorithm; if (nIndex == STRING_NOTFOUND) aLocaleFreeAlgorithm = r_Algorithm; else { nIndex += 1; aLocaleFreeAlgorithm = String(r_Algorithm, nIndex, r_Algorithm.Len() - nIndex); } for (sal_uInt32 i = 0; i < INDEXENTRY_RESSOURCE_COUNT; i++) if (aLocaleFreeAlgorithm == mp_Data[i].GetAlgorithm()) return mp_Data[i].GetTranslation(); return r_Algorithm; }
1,869
679
<reponame>Grosskopf/openoffice<filename>main/salhelper/test/dynamicloader/samplelib.cxx /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #include "samplelib.hxx" #include <sal/types.h> /* */ extern "C" SampleLib_Api* SAL_CALL initSampleLibApi(void) { static SampleLib_Api aApi= {0,0}; if (!aApi.funcA) { aApi.funcA= &funcA; aApi.funcB= &funcB; return (&aApi); } else { return (&aApi); } } sal_Int32 SAL_CALL funcA( sal_Int32 a) { return a; } double SAL_CALL funcB( double a) { return a; }
476
2,983
{ "baseBranch": "feature/proto", "automerge": true, "semanticCommits":true, "unpublishSafe":true, "prCreation":"not-pending", "masterIssue":true, "extends": [ "config:base" ] }
86
359
<reponame>sahilg-xilinx/XRT /** * Copyright (C) 2016-2019 Xilinx, Inc * * Licensed under the Apache License, Version 2.0 (the "License"). You may * not use this file except in compliance with the License. A copy of the * License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ // g++ -g -std=c++14 -I$XILINX_XRT/include -L$XILINX_XRT/lib -I.. -o host.exe hello.cpp -lxilinxopencl #include "hostsrc/utils.hpp" #include "CL/cl_ext_xilinx.h" #include "experimental/xrt++.hpp" #define LENGTH (20) // From HLS, must match verify.xclbin #define XHELLO_HELLO_CONTROL_ADDR_ACCESS1_DATA 0x40 namespace { using utils::throw_if_error; static void help() { std::cout << "usage: %s <bitstream> [options] \n\n"; std::cout << " [-d <index>] : index of device to use (default: 0)\n"; std::cout << " [-x] : use alternative experimental API (xrtcpp) (default: off)\n"; std::cout << " [-l <loops>] : loop kernel execution loops number of times (default: 1)\n"; std::cout << " [-w] : wait for each kernel execution to finish in loop iteration (default: off)\n"; std::cout << "* Bitstream is required\n"; } int run(int argc, char* argv[]) { std::string xclbin; unsigned int device_index = 0; bool xrt = false; bool wait = false; size_t loops = 1; std::vector<std::string> args(argv+1,argv+argc); std::string cur; for (auto& arg : args) { if (arg == "-h") { help(); return 1; } if (arg == "-x") { xrt = true; continue; } if (arg == "-w") { wait = true; continue; } if (arg[0] == '-') { cur = arg; continue; } if (cur == "-d") device_index = std::stoi(arg); else if (cur == "-l") loops = std::stoi(arg); else { xclbin = arg; continue; } } auto platform = 
utils::open_platform("Xilinx","Xilinx"); auto device = utils::get_device(platform,device_index); cl_int err = CL_SUCCESS; auto context = clCreateContext(nullptr, 1, &device, nullptr, nullptr, &err); throw_if_error(err,"clCreateContext failed"); auto queue = clCreateCommandQueue(context, device, CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE, &err); throw_if_error(err || !queue,"clCreateCommandQueue failed"); auto bitstream = utils::read_xclbin(xclbin); auto size = bitstream.size(); auto data = reinterpret_cast<const unsigned char*>(bitstream.data()); auto program = clCreateProgramWithBinary(context, 1, &device, &size, &data, nullptr, &err); throw_if_error(err || !program,"clCreateProgramWithBinary failed"); auto kernel = clCreateKernel(program, "hello", &err); throw_if_error(err || !kernel,"clCreateKernel failed"); auto d_buf = clCreateBuffer(context, CL_MEM_WRITE_ONLY, sizeof(char) * LENGTH, nullptr, &err); throw_if_error(err || !d_buf,"clCreateKernel failed"); if (xrt) { uint64_t d_buf_addr; throw_if_error(xclGetMemObjDeviceAddress(d_buf,device,sizeof(uint64_t),&d_buf_addr),"failed to get dbuf address"); auto xdev = xclGetXrtDevice(device,&err); throw_if_error(err || !xdev,"failed to get xrt device"); xrtcpp::acquire_cu_context(xdev,0/*cuidx*/); auto start = utils::time_ns(); for (int i=0; i<loops; ++i) { xrtcpp::exec::exec_cu_command cmd(xdev); cmd.add_cu(0); cmd.add(XHELLO_HELLO_CONTROL_ADDR_ACCESS1_DATA>>2,d_buf_addr); // low cmd.add((XHELLO_HELLO_CONTROL_ADDR_ACCESS1_DATA>>2)+1,(d_buf_addr >> 32) & 0xFFFFFFFF); // high part of a cmd.execute(); cmd.wait(); } auto end = utils::time_ns(); std::cout << "total (ms): " << (end-start)*1e-6 << "\n"; xrtcpp::release_cu_context(xdev,0/*cuidx*/); } else { auto start = utils::time_ns(); for (int i=0; i<loops; ++i) { throw_if_error(clSetKernelArg(kernel, 0, sizeof(cl_mem), &d_buf),"clSetKenelArg failed"); throw_if_error(clEnqueueTask(queue, kernel, 0, nullptr, nullptr),"clEnqueueTask failed"); if (wait) clFinish(queue); } 
clFinish(queue); auto end = utils::time_ns(); std::cout << "total (ms): " << (end-start)*1e-6 << "\n"; } char h_buf[LENGTH] = {0}; throw_if_error(clEnqueueReadBuffer(queue, d_buf, CL_TRUE, 0, sizeof(char) * LENGTH, h_buf, 0, nullptr, nullptr),"clEnqueueReadBuffer failed"); std::cout << "RESULT: " << h_buf << "\n"; clReleaseMemObject(d_buf); clReleaseProgram(program); clReleaseKernel(kernel); clReleaseCommandQueue(queue); clReleaseContext(context); clReleaseDevice(device); return 0; } } int main(int argc, char* argv[]) { try { auto ret = run(argc,argv); std::cout << "SUCCESS\n"; return ret; } catch (const std::exception& ex) { std::cout << "FAIL: " << ex.what() << "\n"; return 1; } catch (...) { std::cout << "FAIL\n"; return 1; } }
2,114
1,192
//===--- OacrIgnoreCond.h - OACR directives ---------------------*- C++ -*-===//
///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// OacrIgnoreCond.h                                                          //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source     //
// License. See LICENSE.TXT for details.                                     //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////

#pragma once

//
// In free builds, configuration options relating to compiler switches,
// most importantly languages, become constants, thereby removing
// codepaths and reducing disk footprint.
//
// OACR has a number of warnings however for these degenerate conditionals,
// which this file suppresses.
//

// OACR error 6235
#pragma prefast(disable: __WARNING_NONZEROLOGICALOR, "external project has dead branches for unsupported configuration combinations, by design")
// OACR error 6236
#pragma prefast(disable: __WARNING_LOGICALORNONZERO, "external project has dead branches for unsupported configuration combinations, by design")
// OACR error 6236
// NOTE(review): label above duplicates the previous one in the original;
// the warning name suggests a different error number — confirm against the
// OACR warning table.
#pragma prefast(disable: __WARNING_ZEROLOGICALANDLOSINGSIDEEFFECTS, "external project has dead branches for unsupported configuration combinations, by design")
// OACR error 6285
#pragma prefast(disable: __WARNING_LOGICALOROFCONSTANTS, "external project has dead branches for unsupported configuration combinations, by design")
// OACR error 6286
#pragma prefast(disable: __WARNING_NONZEROLOGICALORLOSINGSIDEEFFECTS, "external project has dead branches for unsupported configuration combinations, by design")
// OACR error 6287
#pragma prefast(disable: __WARNING_REDUNDANTTEST, "external project has dead branches for unsupported configuration combinations, by design")

// local variable is initialized but not referenced - every LangOpts use on stack triggers this
#pragma warning(disable: 4189)
744
985
// tests/FullScreenTest.h
#import "cocos2d.h"
#import "BaseAppController.h"

// App delegate for the full-screen test target; behavior inherited from
// BaseAppController.
@interface AppController : BaseAppController
@end

// Common base layer for the full-screen demos: each demo supplies a title
// and subtitle for the on-screen labels.
@interface FullScreenDemo: CCLayer
{
}
-(NSString*) title;
-(NSString*) subtitle;
@end

// Demo variant — adds a sprite at the given point (scaled mode).
@interface FullScreenScale : FullScreenDemo
{}
-(void) addNewSpriteWithCoords:(CGPoint)p;
@end

// Demo variant — adds a sprite at the given point (unscaled mode).
@interface FullScreenNoScale : FullScreenDemo
{}
-(void) addNewSpriteWithCoords:(CGPoint)p;
@end

// Regression demo for issue #1071.
@interface FullScreenIssue1071Test : FullScreenDemo
{
	//weak ref
	CCMenuItemFont *issueTestItem_;
}
@end
191
1,615
//
//  MLNUICollectionViewLayoutProtocol.h
//  MLNUI
//
//  Created by MoMo on 2019/9/16.
//

#ifndef MLNUICollectionViewLayoutProtocol_h
#define MLNUICollectionViewLayoutProtocol_h

#import <UIKit/UIKit.h>
#import "MLNUIScrollViewConst.h"

// Contract a UICollectionViewLayout must satisfy to be driven by MLNUI's
// collection-view bridge.
@protocol MLNUICollectionViewLayoutProtocol <NSObject>

@required

// Scroll/layout direction (vertical or horizontal), mirroring
// UICollectionView's scrollDirection concept.
@property (nonatomic, assign) UICollectionViewScrollDirection scrollDirection;

// Requests a re-layout when needed.  NOTE(review): the exact invalidation
// conditions are defined by the conforming class — not visible from this
// header.
- (void)relayoutIfNeed;

@end

#endif /* MLNUICollectionViewLayoutProtocol_h */
180
359
# Example: fetch Google Maps reviews for a place through the Outscraper API
# (via obsei) and log each review payload to stdout.
import logging
import sys

from obsei.source import OSGoogleMapsReviewsSource, OSGoogleMapsReviewsConfig

logger = logging.getLogger(__name__)
logging.basicConfig(stream=sys.stdout, level=logging.INFO)

source_config = OSGoogleMapsReviewsConfig(
    # Placeholder credential — replace before running.
    api_key="<Enter Your API Key>",  # Get API key from https://outscraper.com/
    # Each query is a Google Maps place URL; this one points at the Taj Mahal.
    queries=[
        "https://www.google.co.in/maps/place/Taj+Mahal/@27.1751496,78.0399535,17z/data=!4m5!3m4!1s0x39747121d702ff6d:0xdd2ae4803f767dde!8m2!3d27.1751448!4d78.0421422"
    ],
    # Cap the number of reviews fetched per query.
    number_of_reviews=3,
)

source = OSGoogleMapsReviewsSource()
# Performs the network lookup; returns one response object per review.
source_response_list = source.lookup(source_config)

for source_response in source_response_list:
    logger.info(source_response.__dict__)
287
852
<filename>L1Trigger/L1TMuonCPPF/test/cppf_emulator_MC.py
# CMSSW configuration: single-muon particle gun (GEN/SIM/DIGI) feeding the
# RPC rec-hit producer and the CPPF emulator, with a FEVTDEBUGHLT output file.
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: SingleMuPt10_pythia8_cfi.py -s GEN,SIM,DIGI --pileup=NoPileUp --geometry DB --conditions=auto:run1_mc --eventcontent FEVTDEBUGHLT --no_exec -n 30
import FWCore.ParameterSet.Config as cms
import datetime
import random

process = cms.Process('DIGI')

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('SimGeneral.MixingModule.mixNoPU_cfi')
#process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
#process.load('Configuration.Geometry.GeometryDB_cff')
#process.load('Configuration.StandardSequences.GeometryExtended_cff')
process.load('Configuration.Geometry.GeometryExtended2016_cff')
process.load('Configuration.Geometry.GeometryExtended2016Reco_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.Generator_cff')
process.load('IOMC.EventVertexGenerators.VtxSmearedRealistic50ns13TeVCollision_cfi')
process.load('GeneratorInterface.Core.genFilterSummary_cff')
process.load('Configuration.StandardSequences.SimIdeal_cff')
process.load('Configuration.StandardSequences.Digi_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.load('RecoLocalMuon.RPCRecHit.rpcRecHits_cfi')
from RecoLocalMuon.RPCRecHit.rpcRecHits_cfi import *
process.load('L1Trigger.L1TMuonCPPF.emulatorCppfDigis_cfi')
from L1Trigger.L1TMuonCPPF.emulatorCppfDigis_cfi import *

process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(1)
# NOTE(review): this reassignment replaces the MessageLogger loaded above and
# discards the reportEvery setting just set — confirm it is intentional.
process.MessageLogger = cms.Service("MessageLogger")

# Number of events to generate.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(300)
)

# Input source: events come from the particle gun, not a file.
process.source = cms.Source("EmptySource"
)

process.options = cms.untracked.PSet(

)

# Production Info
process.configurationMetadata = cms.untracked.PSet(
    annotation = cms.untracked.string('SingleMuPt10_pythia8_cfi.py nevts:100'),
    name = cms.untracked.string('Applications'),
    version = cms.untracked.string('$Revision: 1.19 $')
)

# Output definition: keep only the generator, RPC digi/rechit and CPPF products.
process.FEVTDEBUGHLToutput = cms.OutputModule("PoolOutputModule",
    SelectEvents = cms.untracked.PSet(
        SelectEvents = cms.vstring('generation_step')
    ),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string(''),
        filterName = cms.untracked.string('')
    ),
    eventAutoFlushCompressedSize = cms.untracked.int32(10485760),
    fileName = cms.untracked.string('SingleMuPt10_pythia8_cfi_py_GEN_SIM_DIGI.root'),
    outputCommands = cms.untracked.vstring('drop *',
        "keep *_emulatorMuonRPCDigis_*_*",
        "keep *_emulatorCppfDigis_*_*",
        "keep *_rpcRecHits_*_*",
        "keep *_genParticles_*_*"),
    #outputCommands = process.FEVTDEBUGHLTEventContent.outputCommands,
    splitLevel = cms.untracked.int32(0)
)

# Additional output definition

# Other statements
process.genstepfilter.triggerConditions=cms.vstring("generation_step")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_mc', '')

# Seed the random number services and record the seeds used.
from IOMC.RandomEngine.RandomServiceHelper import RandomNumberServiceHelper
randHelper = RandomNumberServiceHelper(process.RandomNumberGeneratorService)
randHelper.populate()
process.RandomNumberGeneratorService.saveFileName = cms.untracked.string("RandomEngineState.log")

# Particle gun: single muons (PDG -13, antiparticle added) in the CPPF
# endcap acceptance 1.2 < |eta| < 1.6, pT between 1.1 and 30.1 GeV.
process.generator = cms.EDFilter("Pythia8PtGun",
    PGunParameters = cms.PSet(
        AddAntiParticle = cms.bool(True),
        MaxEta = cms.double(1.6),
        MaxPhi = cms.double(3.14159265359),
        MaxPt = cms.double(30.1),
        MinEta = cms.double(1.2),
        MinPhi = cms.double(-3.14159265359),
        MinPt = cms.double(1.1),
        ParticleID = cms.vint32(-13)
    ),
    PythiaParameters = cms.PSet(
        parameterSets = cms.vstring()
    ),
    Verbosity = cms.untracked.int32(0),
    firstRun = cms.untracked.uint32(1),
    psethack = cms.string('single mu pt 10')
)

# Wire the rec-hit producer to the simulated digis, and the emulator to the rec hits.
process.rpcRecHits.rpcDigiLabel = 'simMuonRPCDigis'
process.emulatorCppfDigis.recHitLabel = 'rpcRecHits'

# Path and EndPath definitions
process.generation_step = cms.Path(process.pgen)
process.simulation_step = cms.Path(process.psim)
process.digitisation_step = cms.Path(process.pdigi)
process.rpcrechits_step = cms.Path(process.rpcRecHits)
process.emulatorCppfDigis_step = cms.Path(process.emulatorCppfDigis)
process.genfiltersummary_step = cms.EndPath(process.genFilterSummary)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.FEVTDEBUGHLToutput_step = cms.EndPath(process.FEVTDEBUGHLToutput)

# Schedule definition
process.schedule = cms.Schedule(process.generation_step,process.genfiltersummary_step,process.simulation_step,process.digitisation_step,process.rpcrechits_step,process.emulatorCppfDigis_step,process.endjob_step,process.FEVTDEBUGHLToutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)

# filter all path with the production filter sequence
for path in process.paths:
    getattr(process,path)._seq = process.generator * getattr(process,path)._seq

# Customisation from command line

# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion
2,126
997
#ifndef _BLAS_H_ #define _BLAS_H_ /// @file blas.h /// @brief Functions for implementing basic linear algebra functions. /// #include "rainbow_config.h" #include <stddef.h> #include <stdint.h> void PQCLEAN_RAINBOWIIICIRCUMZENITHAL_CLEAN_gf256v_predicated_add(uint8_t *accu_b, uint8_t predicate, const uint8_t *a, size_t _num_byte); void PQCLEAN_RAINBOWIIICIRCUMZENITHAL_CLEAN_gf256v_add(uint8_t *accu_b, const uint8_t *a, size_t _num_byte); void PQCLEAN_RAINBOWIIICIRCUMZENITHAL_CLEAN_gf256v_mul_scalar(uint8_t *a, uint8_t b, size_t _num_byte); void PQCLEAN_RAINBOWIIICIRCUMZENITHAL_CLEAN_gf256v_madd(uint8_t *accu_c, const uint8_t *a, uint8_t gf256_b, size_t _num_byte); #endif // _BLAS_H_
331
1,883
<gh_stars>1000+
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright 2012 The MITRE Corporation                                      *
 *                                                                           *
 * Licensed under the Apache License, Version 2.0 (the "License");           *
 * you may not use this file except in compliance with the License.          *
 * You may obtain a copy of the License at                                   *
 *                                                                           *
 *     http://www.apache.org/licenses/LICENSE-2.0                            *
 *                                                                           *
 * Unless required by applicable law or agreed to in writing, software       *
 * distributed under the License is distributed on an "AS IS" BASIS,         *
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  *
 * See the License for the specific language governing permissions and       *
 * limitations under the License.                                            *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*!
 * \ingroup cli
 * \page cli_age_estimation Age Estimation
 * \ref cpp_age_estimation "C++ Equivalent"
 * \code
 * $ br -algorithm AgeEstimation \
 * -enroll ../data/MEDS/img/S354-01-t10_01.jpg ../data/MEDS/img/S001-01-t10_01.jpg metadata.csv
 * \endcode
 */

//! [age_estimation]
#include <openbr/openbr_plugin.h>

// Prints the template's file name and the "Age" value its metadata holds
// after enrollment (stored as float, printed as an integer).
static void printTemplate(const br::Template &t)
{
    printf("%s age: %d\n", qPrintable(t.file.fileName()), int(t.file.get<float>("Age")));
}

int main(int argc, char *argv[])
{
    br::Context::initialize(argc, argv);

    // Retrieve class for enrolling templates using the AgeEstimation algorithm
    QSharedPointer<br::Transform> transform = br::Transform::fromAlgorithm("AgeEstimation");

    // Initialize templates from two sample MEDS images.
    br::Template queryA("../data/MEDS/img/S354-01-t10_01.jpg");
    br::Template queryB("../data/MEDS/img/S001-01-t10_01.jpg");

    // Enroll templates: >> runs the transform in place, filling in metadata.
    queryA >> *transform;
    queryB >> *transform;

    printTemplate(queryA);
    printTemplate(queryB);

    br::Context::finalize();
    return 0;
}
//! [age_estimation]
1,079
1,442
<filename>poincare/src/arc_tangent.cpp
// Implementation of the ArcTangent expression node: layout, serialization,
// numeric approximation on complex inputs, and simplification hooks.
#include <poincare/arc_tangent.h>
#include <poincare/complex.h>
#include <poincare/layout_helper.h>
#include <poincare/serialization_helper.h>
#include <poincare/simplification_helper.h>
#include <cmath>

namespace Poincare {

constexpr Expression::FunctionHelper ArcTangent::s_functionHelper;

int ArcTangentNode::numberOfChildren() const { return ArcTangent::s_functionHelper.numberOfChildren(); }

// Delegates sign handling to the owning expression (atan is an odd function).
Expression ArcTangentNode::setSign(Sign s, ReductionContext reductionContext) {
  return ArcTangent(this).setSign(s, reductionContext);
}

// Renders as a prefix function layout: "atan(child)".
Layout ArcTangentNode::createLayout(Preferences::PrintFloatMode floatDisplayMode, int numberOfSignificantDigits) const {
  return LayoutHelper::Prefix(ArcTangent(this), floatDisplayMode, numberOfSignificantDigits, ArcTangent::s_functionHelper.name());
}

// Serializes as prefix text using the registered function name.
int ArcTangentNode::serialize(char * buffer, int bufferSize, Preferences::PrintFloatMode floatDisplayMode, int numberOfSignificantDigits) const {
  return SerializationHelper::Prefix(this, buffer, bufferSize, floatDisplayMode, numberOfSignificantDigits, ArcTangent::s_functionHelper.name());
}

// Numeric approximation of atan(c) for a complex input, with special care for
// real inputs (precision) and the branch cut on the imaginary axis.
template<typename T>
Complex<T> ArcTangentNode::computeOnComplex(const std::complex<T> c, Preferences::ComplexFormat, Preferences::AngleUnit angleUnit) {
  std::complex<T> result;
  if (c.imag() == 0 && std::fabs(c.real()) <= (T)1.0) {
    /* atan: R -> R
     * In these cases we rather use std::atan(double) because atan on complexes
     * is not as precise as atan on double in std library. For instance,
     * - atan(complex<double>(0.01,0.0) = complex(9.9996666866652E-3,5.5511151231258E-17)
     * - atan(0.03) = 9.9996666866652E-3 */
    result = std::atan(c.real());
  } else {
    result = std::atan(c);
    /* atan has a branch cut on ]-inf*i, -i[U]i, +inf*i[: it is then multivalued
     * on this cut. We followed the convention chosen by the lib c++ of llvm on
     * ]-i+0, -i*inf+0[ (warning: atan takes the other side of the cut values on
     * ]-i+0, -i*inf+0[) and choose the values on ]-inf*i, -i[ to comply with
     * atan(-x) = -atan(x) and sin(atan(x)) = x/sqrt(1+x^2). */
    if (c.real() == 0 && c.imag() < -1) {
      result.real(-result.real()); // other side of the cut
    }
  }
  // Drop a negligible real/imaginary part introduced by floating-point error.
  result = ApproximationHelper::NeglectRealOrImaginaryPartIfNeglectable(result, c);
  // Convert from radians to the user's angle unit before returning.
  return Complex<T>::Builder(Trigonometry::ConvertRadianToAngleUnit(result, angleUnit));
}

Expression ArcTangentNode::shallowReduce(ReductionContext reductionContext) {
  return ArcTangent(this).shallowReduce(reductionContext);
}

Expression ArcTangent::setSign(ExpressionNode::Sign s, ExpressionNode::ReductionContext reductionContext) {
  // atan(-x) = -atan(x): use the shared odd-function sign handling.
  return defaultOddFunctionSetSign(s, reductionContext);
}

Expression ArcTangent::shallowReduce(ExpressionNode::ReductionContext reductionContext) {
  {
    // Run the common shallow-reduce checks first (e.g. undefined children).
    Expression e = SimplificationHelper::defaultShallowReduce(*this);
    if (!e.isUninitialized()) {
      return e;
    }
  }
  // Shared reduction logic for inverse trigonometric functions.
  return Trigonometry::shallowReduceInverseFunction(*this, reductionContext);
}

}
1,052
677
// Guarded include so the translation unit also builds standalone; in the
// project tree this resolves to the declaring header as before.
#if __has_include("base/poller/utility.h")
#include "base/poller/utility.h"
#endif

#include <fcntl.h>

namespace base {

// Switches `fd` into non-blocking mode.
// Returns true on success (including when the descriptor is already
// non-blocking); returns false if either fcntl() call fails.
bool SetNonBlocking(int fd) {
  const int current_flags = fcntl(fd, F_GETFL, 0);
  if (current_flags == -1) {
    return false;
  }
  if ((current_flags & O_NONBLOCK) != 0) {
    return true;  // Nothing to do.
  }
  return fcntl(fd, F_SETFL, current_flags | O_NONBLOCK) != -1;
}

}  // namespace base
169
852
#!/usr/bin/env python3
# test for pytables
# taken from https://kastnerkyle.github.io/posts/using-pytables-for-larger-than-ram-data-processing/
# but with some interface modifications (presumably due to pytables changes)
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import tables

random_state = np.random.RandomState(1999)


def make_random_cluster_points(n_samples, random_state=random_state):
    """Draw ``n_samples`` 2-D points scattered around four cluster centers.

    Returns a tuple ``(points, cluster_ids)`` where ``points`` has shape
    ``(n_samples, 2)`` and ``cluster_ids[i]`` indexes the chosen center.
    """
    mu_options = np.array([(-1, -1), (1, 1), (1, -1), (-1, 1)])
    sigma = 0.2
    mu_choices = random_state.randint(0, len(mu_options), size=n_samples)
    means = mu_options[mu_choices]
    # Bug fix: use the provided random_state for the noise as well. The
    # original called np.random.randn, which ignored the seeded generator
    # and made the sampled points non-reproducible.
    return means + random_state.randn(n_samples, 2) * sigma, mu_choices


def plot_clusters(data, clusters, name):
    """Scatter-plot ``data`` colored by ``clusters`` membership."""
    plt.figure()
    colors = ["#9b59b6", "#3498db", "#e74c3c", "#2ecc71"]
    for i in np.unique(clusters):
        plt.scatter(data[clusters == i, 0], data[clusters == i, 1],
                    color=colors[i])
    plt.axis('off')
    plt.title('Plot from %s' % name)


# Generate sample data and persist it to an HDF5 file via PyTables.
sample_data, sample_clusters = make_random_cluster_points(10000)
hdf5_path = "my_data.hdf5"
hdf5_file = tables.file.open_file(hdf5_path, mode='w')
data_storage = hdf5_file.create_array(hdf5_file.root, 'data', sample_data)
clusters_storage = hdf5_file.create_array(hdf5_file.root, 'clusters', sample_clusters)
hdf5_file.close()

# Read the arrays back and plot them.
hdf5_path = "my_data.hdf5"
read_hdf5_file = tables.file.open_file(hdf5_path, mode='r')
# Here we slice [:] all the data back into memory, then operate on it
hdf5_data = read_hdf5_file.root.data[:]
hdf5_clusters = read_hdf5_file.root.clusters[:]
read_hdf5_file.close()

plot_clusters(hdf5_data, hdf5_clusters, "PyTables Array")
680
449
# Copyright (c) OpenMMLab. All rights reserved.
import torch
from mmcv.utils import to_2tuple

from mmdet.core.anchor import AnchorGenerator

from .builder import ROTATED_ANCHOR_GENERATORS


@ROTATED_ANCHOR_GENERATORS.register_module()
class RotatedAnchorGenerator(AnchorGenerator):
    """Fake rotate anchor generator for 2D anchor-based detectors.

    Horizontal bounding box represented by (x,y,w,h,theta).
    """

    def single_level_grid_priors(self,
                                 featmap_size,
                                 level_idx,
                                 dtype=torch.float32,
                                 device='cuda'):
        """Generate grid anchors of a single level.

        Note:
            This function is usually called by method ``self.grid_priors``.

        Args:
            featmap_size (tuple[int]): Size of the feature maps.
            level_idx (int): The index of corresponding feature map level.
            dtype (obj:`torch.dtype`): Date type of points.Defaults to
                ``torch.float32``.
            device (str, optional): The device the tensor will be put on.
                Defaults to 'cuda'.

        Returns:
            torch.Tensor: Anchors in the overall feature maps.
        """
        # Base class produces axis-aligned boxes in (x1, y1, x2, y2) form.
        anchors = super(RotatedAnchorGenerator, self).single_level_grid_priors(
            featmap_size, level_idx, dtype=dtype, device=device)

        # The correct usage is:
        #       from ..bbox.transforms import hbb2obb
        #       anchors = hbb2obb(anchors, self.angle_version)
        # instead of rudely setting the angle to all 0.
        # However, the experiment shows that the performance has decreased.
        # Convert (x1, y1, x2, y2) to (cx, cy, w, h) and append theta = 0.
        num_anchors = anchors.size(0)
        xy = (anchors[:, 2:] + anchors[:, :2]) / 2
        wh = anchors[:, 2:] - anchors[:, :2]
        theta = xy.new_zeros((num_anchors, 1))

        anchors = torch.cat([xy, wh, theta], axis=1)

        return anchors


@ROTATED_ANCHOR_GENERATORS.register_module()
class PseudoAnchorGenerator(AnchorGenerator):
    """Non-Standard pseudo anchor generator that is used to generate valid
    flags only!"""

    def __init__(self, strides):
        # Only strides are kept; no base anchors are built on purpose.
        self.strides = [to_2tuple(stride) for stride in strides]

    @property
    def num_base_anchors(self):
        """list[int]: total number of base anchors in a feature grid"""
        # One pseudo anchor per location at every level.
        return [1 for _ in self.strides]

    def single_level_grid_anchors(self, featmap_sizes, device='cuda'):
        """Calling its grid_anchors() method will raise NotImplementedError!"""
        raise NotImplementedError

    def __repr__(self):
        indent_str = '    '
        repr_str = self.__class__.__name__ + '(\n'
        repr_str += f'{indent_str}strides={self.strides})'
        return repr_str
1,217
2,151
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package com.android.systemui.qs;

import android.content.Context;
import android.content.res.TypedArray;
import android.database.DataSetObserver;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;

import com.android.systemui.R;

import java.lang.ref.WeakReference;

/**
 * A view that arranges it's children in a grid with a fixed number of evenly spaced columns.
 *
 * {@see android.widget.GridView}
 */
public class PseudoGridView extends ViewGroup {

    private int mNumColumns = 3;
    private int mVerticalSpacing;
    private int mHorizontalSpacing;

    public PseudoGridView(Context context, AttributeSet attrs) {
        super(context, attrs);

        // Read numColumns / verticalSpacing / horizontalSpacing from XML.
        final TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.PseudoGridView);
        final int N = a.getIndexCount();
        for (int i = 0; i < N; i++) {
            int attr = a.getIndex(i);
            switch (attr) {
                case R.styleable.PseudoGridView_numColumns:
                    mNumColumns = a.getInt(attr, 3);
                    break;
                case R.styleable.PseudoGridView_verticalSpacing:
                    mVerticalSpacing = a.getDimensionPixelSize(attr, 0);
                    break;
                case R.styleable.PseudoGridView_horizontalSpacing:
                    mHorizontalSpacing = a.getDimensionPixelSize(attr, 0);
                    break;
            }
        }
        a.recycle();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (MeasureSpec.getMode(widthMeasureSpec) == MeasureSpec.UNSPECIFIED) {
            throw new UnsupportedOperationException("Needs a maximum width");
        }
        int width = MeasureSpec.getSize(widthMeasureSpec);

        // Divide the available width evenly among columns, minus the gaps.
        int childWidth = (width - (mNumColumns - 1) * mHorizontalSpacing) / mNumColumns;
        int childWidthSpec = MeasureSpec.makeMeasureSpec(childWidth, MeasureSpec.EXACTLY);
        int childHeightSpec = MeasureSpec.UNSPECIFIED;
        int totalHeight = 0;
        int children = getChildCount();
        int rows = (children + mNumColumns - 1) / mNumColumns;
        for (int row = 0; row < rows; row++) {
            int startOfRow = row * mNumColumns;
            int endOfRow = Math.min(startOfRow + mNumColumns, children);
            // First pass: measure the row and find its tallest child.
            int maxHeight = 0;
            for (int i = startOfRow; i < endOfRow; i++) {
                View child = getChildAt(i);
                child.measure(childWidthSpec, childHeightSpec);
                maxHeight = Math.max(maxHeight, child.getMeasuredHeight());
            }
            // Second pass: stretch shorter children to the row height.
            int maxHeightSpec = MeasureSpec.makeMeasureSpec(maxHeight, MeasureSpec.EXACTLY);
            for (int i = startOfRow; i < endOfRow; i++) {
                View child = getChildAt(i);
                if (child.getMeasuredHeight() != maxHeight) {
                    child.measure(childWidthSpec, maxHeightSpec);
                }
            }
            totalHeight += maxHeight;
            if (row > 0) {
                totalHeight += mVerticalSpacing;
            }
        }

        setMeasuredDimension(width, resolveSizeAndState(totalHeight, heightMeasureSpec, 0));
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        boolean isRtl = isLayoutRtl();
        int children = getChildCount();
        int rows = (children + mNumColumns - 1) / mNumColumns;
        int y = 0;
        for (int row = 0; row < rows; row++) {
            // In RTL layouts rows are filled from the right edge leftwards.
            int x = isRtl ? getWidth() : 0;
            int maxHeight = 0;
            int startOfRow = row * mNumColumns;
            int endOfRow = Math.min(startOfRow + mNumColumns, children);
            for (int i = startOfRow; i < endOfRow; i++) {
                View child = getChildAt(i);
                int width = child.getMeasuredWidth();
                int height = child.getMeasuredHeight();
                if (isRtl) {
                    x -= width;
                }
                child.layout(x, y, x + width, y + height);
                maxHeight = Math.max(maxHeight, height);
                if (isRtl) {
                    x -= mHorizontalSpacing;
                } else {
                    x += width + mHorizontalSpacing;
                }
            }
            y += maxHeight;
            // NOTE(review): the spacing is added after laying out rows > 0, so
            // no gap is inserted between row 0 and row 1 and a trailing gap
            // follows the last row — verify this matches onMeasure's intent.
            if (row > 0) {
                y += mVerticalSpacing;
            }
        }
    }

    /**
     * Bridges between a ViewGroup and a BaseAdapter.
     * <p>
     * Usage: {@code ViewGroupAdapterBridge.link(viewGroup, adapter)}
     * <br />
     * After this call, the ViewGroup's children will be provided by the adapter.
     */
    public static class ViewGroupAdapterBridge extends DataSetObserver {

        private final WeakReference<ViewGroup> mViewGroup;
        private final BaseAdapter mAdapter;
        // True once this bridge has detached from the adapter.
        private boolean mReleased;

        public static void link(ViewGroup viewGroup, BaseAdapter adapter) {
            new ViewGroupAdapterBridge(viewGroup, adapter);
        }

        private ViewGroupAdapterBridge(ViewGroup viewGroup, BaseAdapter adapter) {
            mViewGroup = new WeakReference<>(viewGroup);
            mAdapter = adapter;
            mReleased = false;
            mAdapter.registerDataSetObserver(this);
            refresh();
        }

        // Rebuilds the ViewGroup's children from the adapter, reusing
        // existing views where the adapter allows it.
        private void refresh() {
            if (mReleased) {
                return;
            }
            ViewGroup viewGroup = mViewGroup.get();
            if (viewGroup == null) {
                // The ViewGroup was garbage collected; stop observing.
                release();
                return;
            }
            final int childCount = viewGroup.getChildCount();
            final int adapterCount = mAdapter.getCount();
            final int N = Math.max(childCount, adapterCount);
            for (int i = 0; i < N; i++) {
                if (i < adapterCount) {
                    View oldView = null;
                    if (i < childCount) {
                        oldView = viewGroup.getChildAt(i);
                    }
                    View newView = mAdapter.getView(i, oldView, viewGroup);
                    if (oldView == null) {
                        // We ran out of existing views. Add it at the end.
                        viewGroup.addView(newView);
                    } else if (oldView != newView) {
                        // We couldn't rebind the view. Replace it.
                        viewGroup.removeViewAt(i);
                        viewGroup.addView(newView, i);
                    }
                } else {
                    // More children than adapter items: trim from the end.
                    int lastIndex = viewGroup.getChildCount() - 1;
                    viewGroup.removeViewAt(lastIndex);
                }
            }
        }

        @Override
        public void onChanged() {
            refresh();
        }

        @Override
        public void onInvalidated() {
            release();
        }

        // Unregisters from the adapter exactly once.
        private void release() {
            if (!mReleased) {
                mReleased = true;
                mAdapter.unregisterDataSetObserver(this);
            }
        }
    }
}
3,613
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.botservice.implementation; import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.Delete; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.Patch; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.Post; import com.azure.core.annotation.Put; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.management.exception.ManagementException; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.botservice.fluent.BotsClient; import com.azure.resourcemanager.botservice.fluent.models.BotInner; import com.azure.resourcemanager.botservice.fluent.models.CheckNameAvailabilityResponseBodyInner; import com.azure.resourcemanager.botservice.models.BotResponseList; import com.azure.resourcemanager.botservice.models.CheckNameAvailabilityRequestBody; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in BotsClient. 
 */
public final class BotsClientImpl implements BotsClient {
    // NOTE(review): logger is not referenced within this visible section of the class;
    // presumably used by other generated methods — confirm before removing.
    private final ClientLogger logger = new ClientLogger(BotsClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final BotsService service;

    /** The service client containing this operation class. */
    private final AzureBotServiceImpl client;

    /**
     * Initializes an instance of BotsClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    BotsClientImpl(AzureBotServiceImpl client) {
        // RestProxy builds a runtime implementation of the annotated BotsService interface,
        // backed by the service client's HTTP pipeline and serializer adapter.
        this.service = RestProxy.create(BotsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    // Each interface method maps one-to-one onto an ARM REST endpoint; RestProxy interprets
    // the annotations (HTTP verb, path template, query/header/body parameters) at call time.
    /**
     * The interface defining all the services for AzureBotServiceBots to be used by the proxy service to perform REST
     * calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "AzureBotServiceBots")
    private interface BotsService {
        @Headers({"Content-Type: application/json"})
        @Put(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService"
                + "/botServices/{resourceName}")
        @ExpectedResponses({200, 201})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotInner>> create(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("resourceName") String resourceName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @BodyParam("application/json") BotInner parameters,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Patch(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService"
                + "/botServices/{resourceName}")
        @ExpectedResponses({200, 201})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotInner>> update(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("resourceName") String resourceName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @BodyParam("application/json") BotInner parameters,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Delete(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService"
                + "/botServices/{resourceName}")
        @ExpectedResponses({200, 204})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<Void>> delete(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("resourceName") String resourceName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService"
                + "/botServices/{resourceName}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotInner>> getByResourceGroup(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("resourceName") String resourceName,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BotService"
                + "/botServices")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotResponseList>> listByResourceGroup(
            @HostParam("$host") String endpoint,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("subscriptionId") String subscriptionId,
            @QueryParam("api-version") String apiVersion,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("/subscriptions/{subscriptionId}/providers/Microsoft.BotService/botServices")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotResponseList>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Post("/providers/Microsoft.BotService/checkNameAvailability")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<CheckNameAvailabilityResponseBodyInner>> getCheckNameAvailability(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @BodyParam("application/json") CheckNameAvailabilityRequestBody parameters,
            @HeaderParam("Accept") String accept,
            Context context);

        // Pagination: {nextLink} is an opaque, already-encoded URL returned by the previous
        // page (encoded = true makes RestProxy use it verbatim).
        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotResponseList>> listByResourceGroupNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<BotResponseList>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Creates a Bot Service. Bot Service is a resource group wide resource type.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> createWithResponseAsync(
        String resourceGroupName, String resourceName, BotInner parameters) {
        // Required-parameter validation: failures are surfaced reactively via Mono.error
        // rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        // Capture the subscriber's reactor context for the proxy call, then merge the
        // client's default context into the subscription context downstream.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .create(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            resourceName,
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            parameters,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Creates a Bot Service. Bot Service is a resource group wide resource type.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> createWithResponseAsync(
        String resourceGroupName, String resourceName, BotInner parameters, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults
        // and invoke the proxy directly (no FluxUtil.withContext indirection).
        context = this.client.mergeContext(context);
        return service
            .create(
                this.client.getEndpoint(),
                resourceGroupName,
                resourceName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                parameters,
                accept,
                context);
    }

    /**
     * Creates a Bot Service. Bot Service is a resource group wide resource type.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<BotInner> createAsync(String resourceGroupName, String resourceName, BotInner parameters) {
        // Unwrap the response body; an empty body completes the Mono empty.
        return createWithResponseAsync(resourceGroupName, resourceName, parameters)
            .flatMap(
                (Response<BotInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Creates a Bot Service. Bot Service is a resource group wide resource type.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public BotInner create(String resourceGroupName, String resourceName, BotInner parameters) {
        // Synchronous facade over the async implementation.
        return createAsync(resourceGroupName, resourceName, parameters).block();
    }

    /**
     * Creates a Bot Service. Bot Service is a resource group wide resource type.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BotInner> createWithResponse(
        String resourceGroupName, String resourceName, BotInner parameters, Context context) {
        // Synchronous facade returning the full HTTP response.
        return createWithResponseAsync(resourceGroupName, resourceName, parameters, context).block();
    }

    /**
     * Updates a Bot Service.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> updateWithResponseAsync(
        String resourceGroupName, String resourceName, BotInner parameters) {
        // Same validation pattern as createWithResponseAsync: fail via Mono.error.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        // Issues the PATCH declared on BotsService.update; client context is merged into the
        // reactor subscription context.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .update(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            resourceName,
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            parameters,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Updates a Bot Service.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> updateWithResponseAsync(
        String resourceGroupName, String resourceName, BotInner parameters, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (parameters == null) {
            return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
        } else {
            parameters.validate();
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .update(
                this.client.getEndpoint(),
                resourceGroupName,
                resourceName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                parameters,
                accept,
                context);
    }

    /**
     * Updates a Bot Service.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<BotInner> updateAsync(String resourceGroupName, String resourceName, BotInner parameters) {
        // Unwrap the response body; an empty body completes the Mono empty.
        return updateWithResponseAsync(resourceGroupName, resourceName, parameters)
            .flatMap(
                (Response<BotInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Updates a Bot Service.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public BotInner update(String resourceGroupName, String resourceName, BotInner parameters) {
        // Synchronous facade over the async implementation.
        return updateAsync(resourceGroupName, resourceName, parameters).block();
    }

    /**
     * Updates a Bot Service.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param parameters The parameters to provide for the created bot.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BotInner> updateWithResponse(
        String resourceGroupName, String resourceName, BotInner parameters, Context context) {
        // Synchronous facade returning the full HTTP response.
        return updateWithResponseAsync(resourceGroupName, resourceName, parameters, context).block();
    }

    /**
     * Deletes a Bot Service from the resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String resourceName) {
        // Validation via Mono.error, consistent with the create/update methods; no request
        // body, so there is no parameters.validate() step here.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .delete(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            resourceName,
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Deletes a Bot Service from the resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteWithResponseAsync(
        String resourceGroupName, String resourceName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .delete(
                this.client.getEndpoint(),
                resourceGroupName,
                resourceName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context);
    }

    /**
     * Deletes a Bot Service from the resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Void> deleteAsync(String resourceGroupName, String resourceName) {
        // Discards the (void) response wrapper and completes empty.
        return deleteWithResponseAsync(resourceGroupName, resourceName).flatMap((Response<Void> res) -> Mono.empty());
    }

    /**
     * Deletes a Bot Service from the resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void delete(String resourceGroupName, String resourceName) {
        // Synchronous facade over the async implementation.
        deleteAsync(resourceGroupName, resourceName).block();
    }

    /**
     * Deletes a Bot Service from the resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteWithResponse(String resourceGroupName, String resourceName, Context context) {
        // Synchronous facade returning the full HTTP response.
        return deleteWithResponseAsync(resourceGroupName, resourceName, context).block();
    }

    /**
     * Returns a BotService specified by the parameters.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> getByResourceGroupWithResponseAsync(
        String resourceGroupName, String resourceName) {
        // Validation via Mono.error, consistent with the other operations in this class.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .getByResourceGroup(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            resourceName,
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Returns a BotService specified by the parameters.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<BotInner>> getByResourceGroupWithResponseAsync(
        String resourceGroupName, String resourceName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (resourceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter resourceName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .getByResourceGroup(
                this.client.getEndpoint(),
                resourceGroupName,
                resourceName,
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context);
    }

    /**
     * Returns a BotService specified by the parameters.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<BotInner> getByResourceGroupAsync(String resourceGroupName, String resourceName) {
        // Unwrap the response body; an empty body completes the Mono empty.
        return getByResourceGroupWithResponseAsync(resourceGroupName, resourceName)
            .flatMap(
                (Response<BotInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Returns a BotService specified by the parameters.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public BotInner getByResourceGroup(String resourceGroupName, String resourceName) {
        // Synchronous facade over the async implementation.
        return getByResourceGroupAsync(resourceGroupName, resourceName).block();
    }

    /**
     * Returns a BotService specified by the parameters.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param resourceName The name of the Bot resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return bot resource definition.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BotInner> getByResourceGroupWithResponse(
        String resourceGroupName, String resourceName, Context context) {
        // Synchronous facade returning the full HTTP response.
        return getByResourceGroupWithResponseAsync(resourceGroupName, resourceName, context).block();
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BotInner>> listByResourceGroupSinglePageAsync(String resourceGroupName) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Fetches one page and adapts the raw BotResponseList into a PagedResponse, carrying
        // the nextLink forward so PagedFlux can request subsequent pages.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listByResourceGroup(
                            this.client.getEndpoint(),
                            resourceGroupName,
                            this.client.getSubscriptionId(),
                            this.client.getApiVersion(),
                            accept,
                            context))
            .<PagedResponse<BotInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BotInner>> listByResourceGroupSinglePageAsync(
        String resourceGroupName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .listByResourceGroup(
                this.client.getEndpoint(),
                resourceGroupName,
                this.client.getSubscriptionId(),
                this.client.getApiVersion(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<BotInner> listByResourceGroupAsync(String resourceGroupName) {
        // PagedFlux wires the first-page fetch to the nextLink-follower for lazy paging.
        return new PagedFlux<>(
            () -> listByResourceGroupSinglePageAsync(resourceGroupName),
            nextLink -> listByResourceGroupNextSinglePageAsync(nextLink));
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<BotInner> listByResourceGroupAsync(String resourceGroupName, Context context) {
        return new PagedFlux<>(
            () -> listByResourceGroupSinglePageAsync(resourceGroupName, context),
            nextLink -> listByResourceGroupNextSinglePageAsync(nextLink, context));
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BotInner> listByResourceGroup(String resourceGroupName) {
        // Synchronous paging facade over the PagedFlux.
        return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName));
    }

    /**
     * Returns all the resources of a particular type belonging to a resource group.
     *
     * @param resourceGroupName The name of the Bot resource group in the user subscription.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BotInner> listByResourceGroup(String resourceGroupName, Context context) {
        // Synchronous paging facade over the PagedFlux.
        return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName, context));
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BotInner>> listSinglePageAsync() {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Fetches one page of the subscription-wide listing and adapts it into a
        // PagedResponse, preserving the nextLink for subsequent pages.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            accept,
                            context))
            .<PagedResponse<BotInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BotInner>> listSinglePageAsync(Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Explicit-context overload: merge the caller's context with the client's defaults.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<BotInner> listAsync() {
        // PagedFlux wires the first-page fetch to the nextLink-follower for lazy paging.
        return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<BotInner> listAsync(Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(context), nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BotInner> list() {
        // Synchronous paging facade over the PagedFlux.
        return new PagedIterable<>(listAsync());
    }

    /**
     * Returns all the resources of a particular type belonging to a subscription.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the list of bot service operation response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BotInner> list(Context context) {
        // Synchronous paging facade over the PagedFlux.
        return new PagedIterable<>(listAsync(context));
    }

    /**
     * Check whether a bot name is available.
     *
     * @param parameters The request body parameters to provide for the check name availability request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response body returned for a request to Bot Service Management to check availability of a bot name.
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<CheckNameAvailabilityResponseBodyInner>> getCheckNameAvailabilityWithResponseAsync( CheckNameAvailabilityRequestBody parameters) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (parameters == null) { return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null.")); } else { parameters.validate(); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .getCheckNameAvailability( this.client.getEndpoint(), this.client.getApiVersion(), parameters, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Check whether a bot name is available. * * @param parameters The request body parameters to provide for the check name availability request. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response body returned for a request to Bot Service Management to check availability of a bot name. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<CheckNameAvailabilityResponseBodyInner>> getCheckNameAvailabilityWithResponseAsync( CheckNameAvailabilityRequestBody parameters, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (parameters == null) { return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null.")); } else { parameters.validate(); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .getCheckNameAvailability( this.client.getEndpoint(), this.client.getApiVersion(), parameters, accept, context); } /** * Check whether a bot name is available. * * @param parameters The request body parameters to provide for the check name availability request. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response body returned for a request to Bot Service Management to check availability of a bot name. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<CheckNameAvailabilityResponseBodyInner> getCheckNameAvailabilityAsync( CheckNameAvailabilityRequestBody parameters) { return getCheckNameAvailabilityWithResponseAsync(parameters) .flatMap( (Response<CheckNameAvailabilityResponseBodyInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Check whether a bot name is available. * * @param parameters The request body parameters to provide for the check name availability request. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response body returned for a request to Bot Service Management to check availability of a bot name. */ @ServiceMethod(returns = ReturnType.SINGLE) public CheckNameAvailabilityResponseBodyInner getCheckNameAvailability( CheckNameAvailabilityRequestBody parameters) { return getCheckNameAvailabilityAsync(parameters).block(); } /** * Check whether a bot name is available. * * @param parameters The request body parameters to provide for the check name availability request. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response body returned for a request to Bot Service Management to check availability of a bot name. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<CheckNameAvailabilityResponseBodyInner> getCheckNameAvailabilityWithResponse( CheckNameAvailabilityRequestBody parameters, Context context) { return getCheckNameAvailabilityWithResponseAsync(parameters, context).block(); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the list of bot service operation response. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<BotInner>> listByResourceGroupNextSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext( context -> service.listByResourceGroupNext(nextLink, this.client.getEndpoint(), accept, context)) .<PagedResponse<BotInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the list of bot service operation response. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<BotInner>> listByResourceGroupNextSinglePageAsync(String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .listByResourceGroupNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the list of bot service operation response. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<BotInner>> listNextSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context)) .<PagedResponse<BotInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the list of bot service operation response. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<BotInner>> listNextSinglePageAsync(String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .listNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } }
25,347
32,544
package com.baeldung.o;

import java.security.InvalidParameterException;

/**
 * Executes {@link CalculatorOperation} commands (Open/Closed Principle demo):
 * the calculator stays closed for modification while new operations are added
 * by implementing the operation interface.
 */
public class Calculator {

    /**
     * Runs the given operation.
     *
     * @param operation the command to execute; must not be {@code null}
     * @throws InvalidParameterException if {@code operation} is {@code null}
     */
    public void calculate(CalculatorOperation operation) {
        // Guard-clause style: reject the invalid argument up front,
        // then delegate the actual work to the command object.
        if (operation != null) {
            operation.perform();
            return;
        }
        throw new InvalidParameterException("Can not perform operation");
    }
}
110
758
package com.clover_studio.spikachatmodule.view.menu;

/**
 * Callback interface for the chat attachment menu: one method per menu
 * button, invoked when the corresponding button is tapped.
 *
 * Created by ubuntu_ivo on 24.07.15..
 */
public interface OnMenuButtonsListener {

    // Interface methods are implicitly public and abstract (JLS 9.4),
    // so the redundant "public" modifiers have been dropped.

    /** Camera button tapped. */
    void onCameraClicked();

    /** Audio-recording button tapped. */
    void onAudioClicked();

    /** File-attachment button tapped. */
    void onFileClicked();

    /** Contact-sharing button tapped. */
    void onContactClicked();

    /** Video button tapped. */
    void onVideoClicked();

    /** Location-sharing button tapped. */
    void onLocationClicked();

    /** Gallery button tapped. */
    void onGalleryClicked();
}
185
1,253
//k-means clustering for tabular data having different attributes
#include <iostream>
#include <vector>
#include <cmath>
using namespace std;

int main()
{
    // n = number of objects, nAttr = attributes per object,
    // nClusters = number of clusters (k).
    int n, nAttr, nClusters;
    cout<<"Enter number of entries, attributes and clusters : ";
    cin>>n>>nAttr>>nClusters;

    // Read the n objects, each described by nAttr numeric attributes.
    vector< vector<double>> entries(n, vector<double>(nAttr));
    for(int i=0; i<n; i++)
    {
        for(int j=0; j<nAttr; j++)
        {
            cin>>entries[i][j];
        }
    }

    // Current cluster centroids (seeded by the user).
    vector< vector<double>> cluster(nClusters, vector<double>(nAttr));
    cout<<"Enter centroids of "<<nClusters<<" clusters : ";
    for(int i=0; i<nClusters; i++)
    {
        for(int j=0; j<nAttr; j++)
        {
            cin>>cluster[i][j];
        }
    }

    // Centroids from the previous iteration; the loop stops when an
    // iteration leaves the centroids unchanged.
    vector<vector<double>> oldCluster = cluster;

    // groups[i] = index of the cluster object i is assigned to.
    vector<int> groups(n);
    do
    {
        // --- Assignment step: attach every object to its nearest centroid ---
        for(int i=0; i<n; i++)
        {
            vector<double> dist(nClusters, 0);
            cout<<"For object : "<<i<<endl;
            for(int c=0; c<nClusters; c++)
            {
                // Euclidean distance from object i to centroid c.
                double tempdist = 0;
                for(int j=0; j<nAttr; j++)
                {
                    tempdist += pow((entries[i][j] - cluster[c][j]), 2);
                }
                dist[c] = sqrt(tempdist);
            }

            int minidx = 0;
            for(int k=0; k<nClusters; k++)
            {
                cout<<"Cluster : "<<k<<" DIstance : "<<dist[k]<<endl;
                if(dist[k] < dist[minidx])
                {
                    minidx = k;
                }
            }
            groups[i] = minidx;
        }

        // --- Update step: recompute each centroid as the mean of its members ---
        vector<int> count(nClusters, 0);    // members per cluster
        for(int i=0; i<n; i++)
        {
            count[groups[i]]++;
        }

        oldCluster = cluster;

        // Reset accumulators before summing member coordinates.
        for(int i=0; i<nClusters; i++)
        {
            for(int j=0; j<nAttr; j++)
            {
                cluster[i][j] = 0;
            }
        }

        for(int i=0; i<n; i++)
        {
            int clusterno = groups[i];
            for(int j=0; j<nAttr; j++)
            {
                cluster[clusterno][j] += entries[i][j];
            }
        }

        for(int i=0; i<nClusters; i++)
        {
            // Bug fix: a cluster that received no members would previously
            // compute 0/0 = NaN for every coordinate, poisoning all later
            // distance comparisons. Keep its previous centroid instead.
            if(count[i] == 0)
            {
                cluster[i] = oldCluster[i];
                continue;
            }
            for(int j=0; j<nAttr; j++)
            {
                cluster[i][j] /= count[i];
            }
        }
    }while(oldCluster != cluster);

    // Final assignment of each object.
    for(int i=0; i<n; i++)
    {
        cout<<"Group "<<i<<" == "<<groups[i]<<endl;
    }

    // Centroids from the last two iterations (equal on convergence).
    for(int i=0; i<nClusters; i++)
    {
        for(int j=0; j<nAttr; j++)
        {
            cout<<oldCluster[i][j]<<"\t";
        }
        cout<<endl;
    }
    cout<<"New cluster "<<endl;
    for(int i=0; i<nClusters; i++)
    {
        for(int j=0; j<nAttr; j++)
        {
            cout<<cluster[i][j]<<"\t";
        }
        cout<<endl;
    }
    return 0;
}
1,234
469
{
    "continue": "Continuer",
    "copy": "Copier",
    "copySeed": "Copier la phrase secrète",
    "createAccount": "Créer un compte",
    "goBack": "Retour",
    "notHaveAccounts": "Vous n'avez pas de comptes enregistrés",
    "nothingHere": "Il n'y a rien ici...",
    "password": {
        "error": "Mot de passe incorrect !"
    },
    "userList": {
        "address": "Adresse / Nom du compte",
        "buttons": {
            "login": "Connexion"
        },
        "exportUser": "Exportez votre compte",
        "notHaveAccount": "Veuillez vous connecter pour continuer",
        "password": "Mot de passe",
        "placeholders": {
            "password": "Mot de passe"
        },
        "title": "Bon retour parmi nous"
    },
    "warn": "Vu que vous êtes seul à contrôler votre argent, vous devrez enregistrer votre phrase de sauvegarde dans le cas où cette application est supprimée ou perdue.",
    "yourSeed": "Votre phrase secrète"
}
439
988
<reponame>AlexeyMochalov/firebird<filename>examples/extauth/TcWrapper.cpp<gh_stars>100-1000 /* * Tomcrypt library <= firebird : c++ wrapper. * * The contents of this file are subject to the Initial * Developer's Public License Version 1.0 (the "License"); * you may not use this file except in compliance with the * License. You may obtain a copy of the License at * https://www.firebirdsql.org/en/initial-developer-s-public-license-version-1-0/ * * Software distributed under the License is distributed AS IS, * WITHOUT WARRANTY OF ANY KIND, either express or implied. * See the License for the specific language governing rights * and limitations under the License. * * The Original Code was created by <NAME> * for the Firebird Open Source RDBMS project. * * Copyright (c) 2020 <NAME> <<EMAIL>> * and all contributors signed below. * * All Rights Reserved. * Contributor(s): ______________________________________. */ #include "TcWrapper.h" namespace { // LTC hack class GInit { public: GInit() { ltc_mp = ltm_desc; } }; GInit gInit; } void error(ThrowStatusWrapper* status, const char* text) { if (! 
status) throw text; ISC_STATUS_ARRAY v; v[0] = isc_arg_gds; v[1] = isc_random; v[2] = isc_arg_string; v[3] = (ISC_STATUS) text; v[4] = isc_arg_end; throw FbException(status, v); } void check(ThrowStatusWrapper* status, int err, const char* text) { if (err == CRYPT_OK) return; char buf[256]; sprintf(buf, "%s: %s", text, error_to_string(err)); error(status, buf); } unsigned readHexKey(ThrowStatusWrapper* status, const char* hex, unsigned char* key, unsigned bufSize) { unsigned char* k = key; const char* const end = hex + strlen(hex) - 1; for (const char* s = hex; s < end; s += 2) { if (k - key >= bufSize) break; // FF char ss[3]; ss[0] = s[0]; ss[1] = s[1]; ss[2] = 0; unsigned c = strtoul(ss, NULL, 16); if (c > 255) error(status, "Key format error"); *k++ = static_cast<unsigned char>(c); } return k - key; } void PseudoRandom::init(ThrowStatusWrapper* status) { // LTC hack ltc_mp = ltm_desc; // register yarrow index = register_prng(&yarrow_desc); if (index == -1) error(status, "Error registering PRNG yarrow"); // setup the PRNG check(status, yarrow_start(&state), "Error starting PRNG yarrow"); check(status, rng_make_prng(64, index, &state, NULL), "Error setting up PRNG yarrow"); } void PseudoRandom::fini() { yarrow_done(&state); } const PseudoRandom::PrngDescriptor* PseudoRandom::getDsc() { return &yarrow_desc; } void Hash::init(ThrowStatusWrapper* status, const ltc_hash_descriptor* desc) { // LTC hack ltc_mp = ltm_desc; /* register SHA256 */ index = register_hash(desc); if (index == -1) error(status, "Error registering SHA256"); }
1,039
1,724
<reponame>GeGuNa/skift_oss2 #include <libgraphic/Painter.h> #include <libwidget/Elements.h> #include <libwidget/Layouts.h> namespace Widget { void ButtonElement::paint(Graphic::Painter &painter, const Math::Recti &rectangle) { UNUSED(rectangle); if (enabled()) { if (_style == OUTLINE) { painter.draw_rectangle_rounded(bound(), 4, 1, color(THEME_BORDER)); } else if (_style == FILLED) { painter.fill_rectangle_rounded(bound(), 4, color(THEME_ACCENT)); } if (_mouse_over) { painter.fill_rectangle_rounded(bound(), 4, color(THEME_FOREGROUND).with_alpha(0.1)); } if (_mouse_press) { painter.fill_rectangle_rounded(bound(), 4, color(THEME_FOREGROUND).with_alpha(0.1)); } } } void ButtonElement::event(Event *event) { if (event->type == Event::MOUSE_ENTER) { _mouse_over = true; should_repaint(); event->accepted = true; } else if (event->type == Event::MOUSE_LEAVE) { _mouse_over = false; should_repaint(); event->accepted = true; } else if (event->type == Event::MOUSE_BUTTON_PRESS) { _mouse_press = true; should_repaint(); event->accepted = true; } else if (event->type == Event::MOUSE_BUTTON_RELEASE) { _mouse_press = false; Event action_event = {}; action_event.type = Event::ACTION; dispatch_event(&action_event); should_repaint(); event->accepted = true; } else if (event->type == Event::WIDGET_DISABLE) { _mouse_over = false; _mouse_press = false; } } ButtonElement::ButtonElement(Style style) : _style{style} { min_height(36); flags(Element::GREEDY); } Ref<ButtonElement> button(ButtonElement::Style style, Ref<Element> child, Func<void(void)> on_click) { auto button = make<ButtonElement>(style); button->add(fill(child)); button->min_width(36); button->min_height(36); if (on_click) { button->on(Event::ACTION, [on_click](auto) { on_click(); }); } return button; } /* --- Basic Button --------------------------------------------------------- */ Ref<ButtonElement> basic_button(Ref<Element> child, Func<void(void)> on_click) { return button( ButtonElement::TEXT, child, on_click); } 
Ref<ButtonElement> basic_button(Ref<Graphic::Icon> icon, Func<void(void)> on_click) { return button( ButtonElement::TEXT, spacing({0, 9}, Widget::icon(icon)), on_click); } Ref<ButtonElement> basic_button(String text, Func<void(void)> on_click) { return button( ButtonElement::TEXT, spacing({0, 16}, Widget::label(text, Math::Anchor::CENTER)), on_click); } Ref<ButtonElement> basic_button(Ref<Graphic::Icon> icon, String text, Func<void(void)> on_click) { return button( ButtonElement::TEXT, spacing({0, 0, 12, 16}, Widget::hflow({ spacing({0, 0, 0, 8}, Widget::icon(icon)), Widget::label(text), })), on_click); } Ref<ButtonElement> basic_button(Ref<Graphic::Bitmap> image, String text, Func<void(void)> on_click) { return button( ButtonElement::TEXT, spacing({0, 0, 12, 16}, Widget::hflow({ spacing({0, 0, 0, 8}, Widget::image(image, Graphic::BitmapScaling::CENTER)), Widget::label(text), })), on_click); } /* --- Outlined Button ------------------------------------------------------ */ Ref<ButtonElement> outline_button(Ref<Element> child, Func<void(void)> on_click) { return button( ButtonElement::OUTLINE, child, on_click); } Ref<ButtonElement> outline_button(Ref<Graphic::Icon> icon, Func<void(void)> on_click) { return button( ButtonElement::OUTLINE, spacing({0, 9}, Widget::icon(icon)), on_click); } Ref<ButtonElement> outline_button(String text, Func<void(void)> on_click) { return button( ButtonElement::OUTLINE, spacing({0, 16}, Widget::label(text, Math::Anchor::CENTER)), on_click); } Ref<ButtonElement> outline_button(Ref<Graphic::Icon> icon, String text, Func<void(void)> on_click) { return button( ButtonElement::OUTLINE, spacing({0, 0, 12, 16}, Widget::hflow({ spacing({0, 0, 0, 8}, Widget::icon(icon)), Widget::label(text), })), on_click); } /* --- Filled Button -------------------------------------------------------- */ Ref<ButtonElement> filled_button(Ref<Element> child, Func<void(void)> on_click) { return button( ButtonElement::FILLED, child, on_click); } Ref<ButtonElement> 
filled_button(Ref<Graphic::Icon> icon, Func<void(void)> on_click) { return button( ButtonElement::FILLED, spacing({0, 9}, Widget::icon(icon)), on_click); } Ref<ButtonElement> filled_button(String text, Func<void(void)> on_click) { return button( ButtonElement::FILLED, spacing({0, 16}, Widget::label(text)), on_click); } Ref<ButtonElement> filled_button(Ref<Graphic::Icon> icon, String text, Func<void(void)> on_click) { return button( ButtonElement::FILLED, spacing({0, 0, 12, 16}, Widget::hflow({ spacing({0, 0, 0, 8}, Widget::icon(icon)), Widget::label(text), })), on_click); } } // namespace Widget
2,627
640
<filename>include/_DEVELOPMENT/sdcc/arch/zxn/nirvana-.h // automatically generated by m4 from headers in proto subdir #include <arch/zx/nirvana-.h>
51
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.recoveryservicesbackup.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** Azure Storage Account workload-specific container. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "containerType") @JsonTypeName("StorageContainer") @Fluent public final class AzureStorageContainer extends ProtectionContainer { @JsonIgnore private final ClientLogger logger = new ClientLogger(AzureStorageContainer.class); /* * Fully qualified ARM url. */ @JsonProperty(value = "sourceResourceId") private String sourceResourceId; /* * Storage account version. */ @JsonProperty(value = "storageAccountVersion") private String storageAccountVersion; /* * Resource group name of Recovery Services Vault. */ @JsonProperty(value = "resourceGroup") private String resourceGroup; /* * Number of items backed up in this container. */ @JsonProperty(value = "protectedItemCount") private Long protectedItemCount; /** * Get the sourceResourceId property: Fully qualified ARM url. * * @return the sourceResourceId value. */ public String sourceResourceId() { return this.sourceResourceId; } /** * Set the sourceResourceId property: Fully qualified ARM url. * * @param sourceResourceId the sourceResourceId value to set. * @return the AzureStorageContainer object itself. */ public AzureStorageContainer withSourceResourceId(String sourceResourceId) { this.sourceResourceId = sourceResourceId; return this; } /** * Get the storageAccountVersion property: Storage account version. * * @return the storageAccountVersion value. 
*/ public String storageAccountVersion() { return this.storageAccountVersion; } /** * Set the storageAccountVersion property: Storage account version. * * @param storageAccountVersion the storageAccountVersion value to set. * @return the AzureStorageContainer object itself. */ public AzureStorageContainer withStorageAccountVersion(String storageAccountVersion) { this.storageAccountVersion = storageAccountVersion; return this; } /** * Get the resourceGroup property: Resource group name of Recovery Services Vault. * * @return the resourceGroup value. */ public String resourceGroup() { return this.resourceGroup; } /** * Set the resourceGroup property: Resource group name of Recovery Services Vault. * * @param resourceGroup the resourceGroup value to set. * @return the AzureStorageContainer object itself. */ public AzureStorageContainer withResourceGroup(String resourceGroup) { this.resourceGroup = resourceGroup; return this; } /** * Get the protectedItemCount property: Number of items backed up in this container. * * @return the protectedItemCount value. */ public Long protectedItemCount() { return this.protectedItemCount; } /** * Set the protectedItemCount property: Number of items backed up in this container. * * @param protectedItemCount the protectedItemCount value to set. * @return the AzureStorageContainer object itself. 
*/ public AzureStorageContainer withProtectedItemCount(Long protectedItemCount) { this.protectedItemCount = protectedItemCount; return this; } /** {@inheritDoc} */ @Override public AzureStorageContainer withFriendlyName(String friendlyName) { super.withFriendlyName(friendlyName); return this; } /** {@inheritDoc} */ @Override public AzureStorageContainer withBackupManagementType(BackupManagementType backupManagementType) { super.withBackupManagementType(backupManagementType); return this; } /** {@inheritDoc} */ @Override public AzureStorageContainer withRegistrationStatus(String registrationStatus) { super.withRegistrationStatus(registrationStatus); return this; } /** {@inheritDoc} */ @Override public AzureStorageContainer withHealthStatus(String healthStatus) { super.withHealthStatus(healthStatus); return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ @Override public void validate() { super.validate(); } }
1,645
14,668
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/services/storage/service_worker/service_worker_disk_cache.h"

#include "base/callback_helpers.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "net/base/net_errors.h"
#include "net/base/test_completion_callback.h"
#include "net/disk_cache/disk_cache.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace storage {

// Fixture for ServiceWorkerDiskCache tests: owns a unique temp directory that
// backs the disk cache, plus a TaskEnvironment driving posted tasks.
class ServiceWorkerDiskCacheTest : public testing::Test {
 public:
  ServiceWorkerDiskCacheTest() = default;

  void SetUp() override { ASSERT_TRUE(directory_.CreateUniqueTempDir()); }

  // Drain outstanding cache work so per-test teardown doesn't race file I/O.
  void TearDown() override { FlushCacheTasks(); }

  // Runs the disk_cache backend thread to completion, then the main-thread
  // task queue, so all pending callbacks have fired when this returns.
  void FlushCacheTasks() {
    disk_cache::FlushCacheThreadForTesting();
    task_environment_.RunUntilIdle();
  }

  // Synchronously initializes |disk_cache| with a disk backend rooted at
  // GetPath(); asserts the init callback reports net::OK.
  void InitializeDiskCache(ServiceWorkerDiskCache* disk_cache) {
    base::RunLoop loop;
    disk_cache->InitWithDiskBackend(GetPath(),
                                    /*post_cleanup_callback=*/base::DoNothing(),
                                    base::BindLambdaForTesting([&](int rv) {
                                      ASSERT_EQ(rv, net::OK);
                                      loop.Quit();
                                    }));
    loop.Run();
  }

  base::FilePath GetPath() { return directory_.GetPath(); }

 private:
  base::test::TaskEnvironment task_environment_;
  base::ScopedTempDir directory_;
};

// Tests that callbacks of operations are invoked even when these operations are
// called at the same time for the same key.
TEST_F(ServiceWorkerDiskCacheTest, MultipleCallsForSameKey) {
  auto disk_cache = std::make_unique<ServiceWorkerDiskCache>();

  bool create_entry_called = false;
  bool open_entry_called = false;
  bool doom_entry_called = false;

  const int64_t kKey = 1;
  // Queue create/open/doom for the same key BEFORE the backend is initialized;
  // all three must still get their callbacks once init + flush complete.
  disk_cache->CreateEntry(
      kKey, base::BindLambdaForTesting(
                [&](int rv, std::unique_ptr<ServiceWorkerDiskCacheEntry>) {
                  create_entry_called = true;
                }));
  disk_cache->OpenEntry(
      kKey, base::BindLambdaForTesting(
                [&](int rv, std::unique_ptr<ServiceWorkerDiskCacheEntry>) {
                  open_entry_called = true;
                }));
  disk_cache->DoomEntry(kKey, base::BindLambdaForTesting(
                                  [&](int rv) { doom_entry_called = true; }));

  InitializeDiskCache(disk_cache.get());
  FlushCacheTasks();

  EXPECT_TRUE(create_entry_called);
  EXPECT_TRUE(open_entry_called);
  EXPECT_TRUE(doom_entry_called);
}

TEST_F(ServiceWorkerDiskCacheTest, DisablePriorToInitCompletion) {
  // Create an instance and start it initializing, queue up
  // one of each kind of "entry" function.
  auto disk_cache = std::make_unique<ServiceWorkerDiskCache>();
  EXPECT_FALSE(disk_cache->is_disabled());

  size_t callback_count = 0;
  // Every queued operation (and init itself) is expected to abort.
  auto completion_callback = base::BindLambdaForTesting([&](int rv) {
    EXPECT_EQ(rv, net::ERR_ABORTED);
    ++callback_count;
  });
  auto entry_callback = base::BindLambdaForTesting(
      [&](int rv, std::unique_ptr<ServiceWorkerDiskCacheEntry> entry) {
        EXPECT_EQ(rv, net::ERR_ABORTED);
        EXPECT_FALSE(entry);
        ++callback_count;
      });

  disk_cache->InitWithDiskBackend(GetPath(),
                                  /*post_cleanup_callback=*/base::DoNothing(),
                                  completion_callback);
  disk_cache->CreateEntry(1, entry_callback);
  disk_cache->OpenEntry(2, entry_callback);
  disk_cache->DoomEntry(3, completion_callback);

  // Pull the plug on all that.
  EXPECT_FALSE(disk_cache->is_disabled());
  disk_cache->Disable();
  EXPECT_TRUE(disk_cache->is_disabled());

  FlushCacheTasks();

  // 4 == init + create + open + doom, all aborted.
  EXPECT_EQ(callback_count, 4u);

  // Ensure the directory can be deleted at this point.
  EXPECT_TRUE(base::DirectoryExists(GetPath()));
  EXPECT_FALSE(base::IsDirectoryEmpty(GetPath()));
  EXPECT_TRUE(base::DeletePathRecursively(GetPath()));
  EXPECT_FALSE(base::DirectoryExists(GetPath()));
}

TEST_F(ServiceWorkerDiskCacheTest, DisableAfterInitted) {
  // Create an instance and start it initializing, queue up
  // one of each kind of "entry" function.
  auto disk_cache = std::make_unique<ServiceWorkerDiskCache>();
  EXPECT_FALSE(disk_cache->is_disabled());
  InitializeDiskCache(disk_cache.get());

  // Pull the plug
  disk_cache->Disable();
  FlushCacheTasks();

  // Ensure the directory can be deleted at this point.
  EXPECT_TRUE(base::DirectoryExists(GetPath()));
  EXPECT_FALSE(base::IsDirectoryEmpty(GetPath()));
  EXPECT_TRUE(base::DeletePathRecursively(GetPath()));
  EXPECT_FALSE(base::DirectoryExists(GetPath()));

  // Methods should fail.
  size_t callback_count = 0;
  auto completion_callback = base::BindLambdaForTesting([&](int rv) {
    EXPECT_EQ(rv, net::ERR_ABORTED);
    ++callback_count;
  });
  auto entry_callback = base::BindLambdaForTesting(
      [&](int rv, std::unique_ptr<ServiceWorkerDiskCacheEntry> entry) {
        EXPECT_EQ(rv, net::ERR_ABORTED);
        EXPECT_FALSE(entry);
        ++callback_count;
      });
  disk_cache->CreateEntry(1, entry_callback);
  disk_cache->OpenEntry(2, entry_callback);
  disk_cache->DoomEntry(3, completion_callback);
  FlushCacheTasks();
  EXPECT_EQ(callback_count, 3u);
}

TEST_F(ServiceWorkerDiskCacheTest, CleanupCallback) {
  // Test that things delete fine when we disable the cache and wait for
  // the cleanup callback.
  net::TestClosure cleanup_done;
  net::TestCompletionCallback init_done;
  auto disk_cache = std::make_unique<ServiceWorkerDiskCache>();
  EXPECT_FALSE(disk_cache->is_disabled());
  disk_cache->InitWithDiskBackend(GetPath(), cleanup_done.closure(),
                                  init_done.callback());
  EXPECT_EQ(net::OK, init_done.WaitForResult());
  disk_cache->Disable();
  cleanup_done.WaitForResult();

  // Ensure the directory can be deleted at this point.
  EXPECT_TRUE(base::DirectoryExists(GetPath()));
  EXPECT_FALSE(base::IsDirectoryEmpty(GetPath()));
  EXPECT_TRUE(base::DeletePathRecursively(GetPath()));
  EXPECT_FALSE(base::DirectoryExists(GetPath()));
}

}  // namespace storage
2,451
812
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# NOTE(review): Python 2 only — uses print statements, `unicode`, and
# str.decode('utf8'); porting to Python 3 would change behavior and is out of
# scope for a comment-only pass.
"""Get predictions from the log file of SEMPRE."""
import sys, os, shutil, re, argparse

# Matches a SEMPRE "Pred@0000" log line and captures, in order:
# formula, denotation value, type, score, probability, compatibility.
PATTERN = re.compile(r'Pred@0000: '
        r'\(derivation \(formula (.*)\)\) '
        r'\(value (.*)\) '
        r'\(type (.*)\)\) \[score=(.*), prob=(.*), comp=(.*)\]')


def lisptree_to_python_object(charbuffer):
    """Convert the lisptree to Python object.

    Args:
        charbuffer: REVERSED list of characters of the lisptree string.
            Characters will be consumed from the list.

    Returns:
        A nested structure of lists (for parenthesized groups) and strings
        (for atoms / quoted strings); backslash escapes are honored.
    """
    c = charbuffer.pop()
    if c == '(':
        # Parenthesized group -> recurse until the matching ')'.
        answer = []
        while charbuffer[-1] != ')':
            if charbuffer[-1] == ' ':
                charbuffer.pop()
            else:
                answer.append(lisptree_to_python_object(charbuffer))
        assert charbuffer.pop() == ')'
        return answer
    elif c == '"':
        # Quoted string -> consume until the closing '"', honoring '\' escapes.
        answer = []
        while charbuffer[-1] != '"':
            c = charbuffer.pop()
            if c == '\\':
                answer.append(charbuffer.pop())
            else:
                answer.append(c)
        assert charbuffer.pop() == '"'
        return ''.join(answer)
    else:
        # Bare atom -> consume until whitespace or ')'.
        answer = [c if c != '\\' else charbuffer.pop()]
        while charbuffer[-1] not in (' ', ')'):
            c = charbuffer.pop()
            if c == '\\':
                answer.append(charbuffer.pop())
            else:
                assert c != '('
                answer.append(c)
        return ''.join(answer)


def lisptree_to_values(tree):
    """Flatten a "(list ...)" denotation lisptree into a tab-separated string.

    Handles (number ...), (date y m d) with -1 meaning unknown ("xx"), and
    (name id text) entries (whitespace-normalized).
    """
    assert tree.startswith('(list ') and tree.endswith(')')
    tree = lisptree_to_python_object(list(tree.decode('utf8'))[::-1])
    assert tree[0] == 'list'
    answer = []
    for subtree in tree[1:]:
        if subtree[0] == 'number':
            answer.append(float(subtree[1]))
        elif subtree[0] == 'date':
            # -1 components are unknown and rendered as "xx".
            answer.append('{}-{}-{}'.format(
                int(subtree[1]) if subtree[1] != '-1' else 'xx',
                int(subtree[2]) if subtree[2] != '-1' else 'xx',
                int(subtree[3]) if subtree[3] != '-1' else 'xx'))
        else:
            assert subtree[0] == 'name'
            answer.append(re.sub('\s+', ' ', subtree[2]).strip())
    return '\t'.join(unicode(x) for x in answer)


def main():
    """Scan the log for the requested iteration and print one prediction per
    example as "ex_id<TAB>denotation"; examples with no prediction print bare
    ex_id (or "ex_id<TAB>None" for the final one)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('infile', help='log file')
    parser.add_argument('iteration', help='iteration to extract')
    args = parser.parse_args()

    prefix = 'iter=%s:' % args.iteration
    ex_id = None
    with open(args.infile) as fin:
        for line in fin:
            line = line.strip()
            if line.startswith(prefix):
                if ex_id is not None:
                    # No prediction for the previous example
                    print ex_id
                ex_id = line.split()[3]
            elif ex_id is not None and line.startswith('Pred@0000:'):
                match = PATTERN.match(line)
                formula, denotation, deno_type, score, prob, comp = match.groups()
                denotation = lisptree_to_values(denotation)
                print u'{}\t{}'.format(ex_id, denotation)
                ex_id = None
    if ex_id is not None:
        print '\t'.join([ex_id, 'None'])


if __name__ == '__main__':
    main()
1,663
610
/* <editor-fold desc="MIT License">

Copyright(c) 2018 <NAME>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

</editor-fold> */

#include <vsg/io/BinaryInput.h>
#include <vsg/io/ReaderWriter.h>

#include <cstring>
#include <iostream>
#include <sstream>

using namespace vsg;

// Deserializer reading the VSG binary format from a std::istream.
BinaryInput::BinaryInput(std::istream& input, ref_ptr<ObjectFactory> in_objectFactory, ref_ptr<const Options> in_options) :
    Input(in_objectFactory, in_options),
    _input(input)
{
}

// Reads one length-prefixed string: a uint32 byte count followed by raw bytes.
// NOTE(review): the length is trusted as-is; a corrupt/hostile stream could
// request a huge allocation, and a short read is not detected — confirm
// callers handle stream failure.
void BinaryInput::_read(std::string& value)
{
    uint32_t size = readValue<uint32_t>(nullptr);

    value.resize(size, 0);

    _input.read(value.data(), size);
}

// Reads `num` consecutive strings into `value[0..num)`.
void BinaryInput::read(size_t num, std::string* value)
{
    if (num == 1)
    {
        _read(*value);
    }
    else
    {
        for (; num > 0; --num, ++value)
        {
            _read(*value);
        }
    }
}

// Reads one object reference. Objects are identified by an ObjectID so shared
// references deserialize to the same instance: the first occurrence reads the
// class name, creates the object via the factory, and recursively reads its
// state; later occurrences are resolved from objectIDMap. "nullptr" as a class
// name encodes a null reference.
vsg::ref_ptr<vsg::Object> BinaryInput::read()
{
    ObjectID id = objectID();

    if (auto itr = objectIDMap.find(id); itr != objectIDMap.end())
    {
        return itr->second;
    }
    else
    {
        std::string className = readValue<std::string>(nullptr);

        vsg::ref_ptr<vsg::Object> object;
        if (className != "nullptr")
        {
            object = objectFactory->create(className.c_str());
            if (object)
            {
                object->read(*this);
            }
            else
            {
                // NOTE(review): diagnostic goes to stdout; std::cerr would be
                // more conventional for an error path.
                std::cout << "Unable to create instance of class : " << className << std::endl;
            }
        }
        objectIDMap[id] = object;

        return object;
    }
}
1,018
370
/** @file multi_postlist.h
 * @brief Class for merging PostList objects from subdatabases.
 */
/* Copyright (C) 2007,2008,2009,2011,2015,2017 <NAME>
 * Copyright (C) 2009 Lemur Consulting Ltd
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef XAPIAN_INCLUDED_MULTI_POSTLIST_H
#define XAPIAN_INCLUDED_MULTI_POSTLIST_H

#include <string>

#include "xapian/api/postlist.h"
#include "xapian/backends/positionlist.h"

/// Class for merging PostList objects from subdatabases.
class MultiPostList : public PostList {
    /// Don't allow assignment.
    void operator=(const MultiPostList &) = delete;

    /// Don't allow copying.
    MultiPostList(const MultiPostList &) = delete;

    /// Current subdatabase.
    Xapian::doccount current;

    /// Number of PostList* entries in @a postlists.
    size_t n_shards;

    /// Sub-postlists which we use as a heap.
    PostList** postlists;

    /// Number of entries in docids;
    size_t docids_size;

    /// Heap of docids from the current positions of the postlists.
    Xapian::docid* docids;

  public:
    /// Constructor.
    MultiPostList(size_t n_shards_, PostList** postlists_);

    /// Destructor.
    ~MultiPostList();

    /// Get a lower bound on the number of documents indexed by this term.
    Xapian::doccount get_termfreq_min() const;

    /// Get an upper bound on the number of documents indexed by this term.
    Xapian::doccount get_termfreq_max() const;

    /// Get an estimate of the number of documents indexed by this term.
    Xapian::doccount get_termfreq_est() const;

    /// Return the current docid.
    Xapian::docid get_docid() const;

    /// Return the wdf for the document at the current position.
    Xapian::termcount get_wdf() const;

    /// Return the weight contribution for the current position.
    double get_weight(Xapian::termcount doclen,
		      Xapian::termcount unique_terms) const;

    /// Return true if the current position is past the last entry in this list.
    bool at_end() const;

    /// Recalculate the upper bound on what get_weight() can return.
    double recalc_maxweight();

    /// Read the position list for the term in the current document and
    /// return a pointer to it (not owned by the caller).
    PositionList * open_position_list() const;

    /** Advance the current position to the next document in the postlist.
     *
     *  The list starts before the first entry in the list, so next(),
     *  skip_to() or check() must be called before any methods which need the
     *  context of the current position.
     *
     *  @param w_min	The minimum weight contribution that is needed (this is
     *			just a hint which PostList subclasses may ignore).
     *
     *  @return	If a non-NULL pointer is returned, then the caller should
     *		substitute the returned pointer for its pointer to us, and then
     *		delete us.  This "pruning" can only happen for a non-leaf
     *		subclass of this class.
     */
    PostList* next(double w_min);

    /** Skip forward to the specified docid.
     *
     *  If the specified docid isn't in the list, position ourselves on the
     *  first document after it (or at_end() if no greater docids are present).
     *
     *  @param w_min	The minimum weight contribution that is needed (this is
     *			just a hint which PostList subclasses may ignore).
     *
     *  @return	If a non-NULL pointer is returned, then the caller should
     *		substitute the returned pointer for its pointer to us, and then
     *		delete us.  This "pruning" can only happen for a non-leaf
     *		subclass of this class.
     */
    PostList* skip_to(Xapian::docid, double w_min);

    // We don't implement check() because we're only used in a PostingIterator
    // wrapper and that doesn't call check().
    //
    // Should that change, we could handle check() a bit more efficiently with
    // some extra bookkeeping on operations after check(), because we know
    // which subdatabase a given docid will be in, and so we only actually need
    // to call check() on that subdatabase.

    /// Return a string description of this object.
    std::string get_description() const;
};

#endif // XAPIAN_INCLUDED_MULTI_POSTLIST_H
1,541
965
// Example usage: emit a formatted trace message in the atlTraceGeneral
// category at severity level 4.
int i = 1;
ATLTRACE2(atlTraceGeneral, 4, "Integer = %d\n", i);
// Output: 'Integer = 1'
43
6,098
package hex.psvm;

import hex.psvm.psvm.*;
import org.junit.Ignore;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;

/**
 * Convenience JUnit suite bundling all PSVM-related test classes so they can
 * be run together from an IDE during development.
 */
@Ignore // CI (Jenkins) uses a different approach, this is only for convenience in development
@RunWith(Suite.class)
@Suite.SuiteClasses({
        KernelTest.class,
        LLMatrixTest.class,
        MatrixUtilsTest.class,
        IncompleteCholeskyFactorizationTest.class,
        PrimalDualIPMTest.class,
        PSVMTest.class
})
public class PSVMTestSuite {
}
201
338
from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections try: import cPickle as pickle except ImportError: import pickle from multiprocessing import Pool def _lower(s): return s.lower() class SimpleTokenizer(): def __init__(self): pass def tokenize(self, data): """data: str""" data = data.replace('\n', ' ').replace('\r', '') splitted = data.split(' ') pool = Pool() tokenized = pool.map(_lower, splitted) return tokenized #TODO: add min_count, together with n_words to determine if UNK is needed def frequency_count(self, tokenized_data, n_words, min_count): count = [['UNK', -1]] num_above_threshold = 0 counter = collections.Counter(tokenized_data) for k, v in counter.items(): if v >= min_count: num_above_threshold += 1 n_words = min(n_words, num_above_threshold) # if more tokens than needed, map the rest to UNK if len(counter) > n_words: count.extend(collections.Counter(tokenized_data).most_common(n_words - 1)) else: count = collections.Counter(tokenized_data).most_common(n_words) dictionary = dict() for word, _ in count: dictionary[word] = len(dictionary) reversed_dictionary = dict(zip(dictionary.values(), dictionary.keys())) return dictionary, reversed_dictionary def index(self, tokenized_data, dictionary): data = list() unk_count = 0 def _index(word): if word in dictionary: index = dictionary[word] else: index = dictionary['UNK'] return index data = [_index(word) for word in tokenized_data] return data def do_index_data(self, data, n_words=10000, min_count=100): """transform data: str into a tokens: list. tokens are mapped to {0, 1, ..., n_words - 1}""" self.tokenized = self.tokenize(data) self.dictionary, self.reversed_dictionary = self.frequency_count(self.tokenized, n_words, min_count) self.indexed = self.index(self.tokenized, self.dictionary) return self.indexed
978
3,034
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.test;

import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.util.Timer;

import java.util.Map;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Shared assertion helpers for {@link ThreadContext} tests, so the same checks
 * can be reused across different ThreadContext map implementations.
 */
public class ThreadContextUtilityClass {

    /**
     * Micro-benchmark: populates ten context entries, then times one million
     * {@code getImmutableContext()} calls; results go to stdout.
     */
    public static void perfTest() {
        ThreadContext.clearMap();
        final Timer complete = new Timer("ThreadContextTest");
        complete.start();
        ThreadContext.put("Var1", "value 1");
        ThreadContext.put("Var2", "value 2");
        ThreadContext.put("Var3", "value 3");
        ThreadContext.put("Var4", "value 4");
        ThreadContext.put("Var5", "value 5");
        ThreadContext.put("Var6", "value 6");
        ThreadContext.put("Var7", "value 7");
        ThreadContext.put("Var8", "value 8");
        ThreadContext.put("Var9", "value 9");
        ThreadContext.put("Var10", "value 10");
        final int loopCount = 1000000;
        final Timer timer = new Timer("ThreadContextCopy", loopCount);
        timer.start();
        for (int i = 0; i < loopCount; ++i) {
            final Map<String, String> map = ThreadContext.getImmutableContext();
            assertNotNull(map);
        }
        timer.stop();
        complete.stop();
        System.out.println(timer.toString());
        System.out.println(complete.toString());
    }

    /** getContext() on an empty ThreadContext yields an empty map. */
    public static void testGetContextReturnsEmptyMapIfEmpty() {
        ThreadContext.clearMap();
        assertTrue(ThreadContext.getContext().isEmpty());
    }

    /**
     * getContext() returns a mutable snapshot: writing to the returned map
     * must not leak into the ThreadContext, and later snapshots are
     * independent of earlier ones.
     */
    public static void testGetContextReturnsMutableCopy() {
        ThreadContext.clearMap();
        final Map<String, String> map1 = ThreadContext.getContext();
        assertTrue(map1.isEmpty());
        map1.put("K", "val"); // no error
        assertEquals("val", map1.get("K"));
        // adding to copy does not affect thread context map
        assertTrue(ThreadContext.getContext().isEmpty());
        ThreadContext.put("key", "val2");
        final Map<String, String> map2 = ThreadContext.getContext();
        assertEquals(1, map2.size());
        assertEquals("val2", map2.get("key"));
        map2.put("K", "val"); // no error
        assertEquals("val", map2.get("K"));
        // first copy is not affected
        assertNotSame(map1, map2);
        assertEquals(1, map1.size());
    }

    /** getImmutableContext() on an empty ThreadContext yields an empty map. */
    public static void testGetImmutableContextReturnsEmptyMapIfEmpty() {
        ThreadContext.clearMap();
        assertTrue(ThreadContext.getImmutableContext().isEmpty());
    }

    /** Mutating a non-empty immutable context must throw. */
    public static void testGetImmutableContextReturnsImmutableMapIfNonEmpty() {
        ThreadContext.clearMap();
        ThreadContext.put("key", "val");
        final Map<String, String> immutable = ThreadContext.getImmutableContext();
        assertThrows(UnsupportedOperationException.class, () -> immutable.put("otherkey", "otherval"));
    }

    /** Mutating an empty immutable context must also throw. */
    public static void testGetImmutableContextReturnsImmutableMapIfEmpty() {
        ThreadContext.clearMap();
        final Map<String, String> immutable = ThreadContext.getImmutableContext();
        assertThrows(UnsupportedOperationException.class, () -> immutable.put("otherkey", "otherval"));
    }

    /** getImmutableStack() on a cleared stack yields an empty stack. */
    public static void testGetImmutableStackReturnsEmptyStackIfEmpty() {
        ThreadContext.clearStack();
        assertTrue(ThreadContext.getImmutableStack().asList().isEmpty());
    }

    /** put() stores a value retrievable via get(). */
    public static void testPut() {
        ThreadContext.clearMap();
        assertNull(ThreadContext.get("testKey"));
        ThreadContext.put("testKey", "testValue");
        assertEquals("testValue", ThreadContext.get("testKey"));
    }
}
1,563
921
// Copyright 2016-2021 <NAME>
// Licensed under the Apache License, version 2.0
// See accompanying file LICENSE or https://www.apache.org/licenses/LICENSE-2.0

#include <iostream>
#include <climits>
#include <cmath>
#include <cstdlib>
#include <chrono>
#include <thread>

#include <glm/geometric.hpp>

#include "export.h"
#include "mesher.h"
#include <libcurv/io/compiled_shape.h>
#include <libcurv/io/mesh.h>
#include <libcurv/shape.h>
#include <libcurv/exception.h>
#include <libcurv/context.h>
#include <libcurv/die.h>

using namespace curv::io;

// Shared worker used by all of the per-format entry points below.
void export_mesh(Mesh_Format, curv::Value value,
    curv::Program&,
    const Export_Params& params,
    std::ostream& out);

// STL export entry point: opens the output file then delegates to export_mesh.
void export_stl(curv::Value value,
    curv::Program& prog,
    const Export_Params& params,
    Output_File& ofile)
{
    ofile.open();
    export_mesh(Mesh_Format::stl, value, prog, params, ofile.ostream());
}

// Wavefront OBJ export entry point.
void export_obj(curv::Value value,
    curv::Program& prog,
    const Export_Params& params,
    Output_File& ofile)
{
    ofile.open();
    export_mesh(Mesh_Format::obj, value, prog, params, ofile.ostream());
}

// X3D export entry point (the only format supporting vertex/face colouring).
void export_x3d(curv::Value value,
    curv::Program& prog,
    const Export_Params& params,
    Output_File& ofile)
{
    ofile.open();
    export_mesh(Mesh_Format::x3d, value, prog, params, ofile.ostream());
}

// glTF export entry point.
void export_gltf(curv::Value value,
    curv::Program& prog,
    const Export_Params& params,
    Output_File& ofile)
{
    ofile.open();
    export_mesh(Mesh_Format::gltf, value, prog, params, ofile.ostream());
}

// Prints the -O options common to all mesh formats (for --help output).
void describe_mesh_opts(std::ostream& out)
{
    out <<
    "-O mgen=#smooth|#sharp : Mesh generator algorithm (default #smooth).\n"
    "-O jit : Fast evaluation using JIT compiler (uses C++ compiler).\n"
    "-O vsize=<voxel size>\n"
    "-O vcount=<approximate voxel count>\n"
    "-O eps=<small number> : epsilon to compute normal by partial differences\n"
    "-O adaptive=<0...1> : Deprecated. Use meshlab to simplify mesh.\n"
    ;
}

// Same as describe_mesh_opts, plus the X3D-only colouring option.
void describe_colour_mesh_opts(std::ostream& out)
{
    describe_mesh_opts(out);
    out <<
    "-O colouring=#face|#vertex (default #face)\n"
    ;
}

// Validates the value is a 3D shape, parses -O parameters into Mesh_Export
// options, optionally JIT-compiles the shape, and dispatches to the mesher
// selected by opts.mgen_.
void export_mesh(Mesh_Format format, curv::Value value,
    curv::Program& prog,
    const Export_Params& params,
    std::ostream& out)
{
    curv::Shape_Program shape(prog);
    curv::At_Program cx(prog);
    if (!shape.recognize(value, nullptr) || !shape.is_3d_)
        throw curv::Exception(cx, "mesh export: not a 3D shape");

    // Translate -O name=value parameters; unknown names are reported by
    // p.unknown_parameter().
    Mesh_Export opts;
    for (auto& i : params.map_) {
        Param p{params, i};
        if (p.name_ == "mgen") {
            auto val = p.to_symbol();
            if (val == "smooth")
                opts.mgen_ = Mesh_Gen::smooth;
            else if (val == "sharp")
                opts.mgen_ = Mesh_Gen::sharp;
            else if (val == "iso")
                opts.mgen_ = Mesh_Gen::iso;
            else if (val == "hybrid")
                opts.mgen_ = Mesh_Gen::hybrid;
            else if (val == "tmc")
                opts.mgen_ = Mesh_Gen::tmc;
            else
                throw curv::Exception(p, "'mgen' must be #smooth|#sharp|#iso|#hybrid|#tmc");
        } else if (p.name_ == "jit") {
            opts.jit_ = p.to_bool();
        } else if (p.name_ == "vsize") {
            opts.vsize_ = p.to_double();
            if (opts.vsize_ <= 0.0) {
                throw curv::Exception(p, "'vsize' must be positive");
            }
        } else if (p.name_ == "vcount") {
            opts.vcount_ = p.to_int(1, INT_MAX);
        } else if (p.name_ == "eps") {
            opts.eps_ = p.to_double();
        } else if (p.name_ == "adaptive") {
            opts.adaptive_ = p.to_double(1.0);
            if (opts.adaptive_ < 0.0 || opts.adaptive_ > 1.0) {
                throw curv::Exception(p, "'adaptive' must be in range 0...1");
            }
        } else if (format == Mesh_Format::x3d && p.name_ == "colouring") {
            // Colouring only applies to X3D output.
            auto val = p.to_symbol();
            if (val == "face")
                opts.colouring_ = Mesh_Export::face_colour;
            else if (val == "vertex")
                opts.colouring_ = Mesh_Export::vertex_colour;
            else {
                throw curv::Exception(p, "'colouring' must be #face or #vertex");
            }
        } else
            p.unknown_parameter();
    }

    // Optionally JIT-compile the shape to native code (much faster sampling);
    // compile time is reported on stderr.
    std::unique_ptr<curv::io::Compiled_Shape> cshape = nullptr;
    if (opts.jit_) {
        auto cstart_time = std::chrono::steady_clock::now();
        cshape = std::make_unique<curv::io::Compiled_Shape>(shape);
        auto cend_time = std::chrono::steady_clock::now();
        std::chrono::duration<double> compile_time = cend_time - cstart_time;
        std::cerr
            << "Compiled shape in " << compile_time.count() << "s\n";
        std::cerr.flush();
    } else {
        std::cerr
            << "You are in SLOW MODE. Use '-O jit' to speed up rendering.\n";
    }

    const curv::Shape* pshape;
    if (cshape)
        pshape = &*cshape;
    else
        pshape = &shape;
    // Multithreaded sampling is only safe with the JIT-compiled shape.
    bool multithreaded = (cshape != nullptr);

#if LEAN_BUILD
    // Lean builds ship only the TMC mesher.
    tmc_mesher(*pshape, multithreaded, opts, cx, format, out);
#else
    switch (opts.mgen_) {
    case Mesh_Gen::smooth:
        vdb_mesher(*pshape, multithreaded, opts, cx, format, out);
        break;
    case Mesh_Gen::sharp:
    case Mesh_Gen::iso:
    case Mesh_Gen::hybrid:
        libfive_mesher(*pshape, multithreaded, opts, cx, format, out);
        break;
    case Mesh_Gen::tmc:
        tmc_mesher(*pshape, multithreaded, opts, cx, format, out);
        break;
    default:
        throw curv::Exception(cx, "mesh export: unknown mesh generator");
    }
#endif
}
2,674
566
package org.kohsuke.args4j;

/**
 * Test bean exercising setter-method-based {@link Option} binding: the value
 * passed on the command line is transformed (upper-cased) by the setter.
 */
public class Setter {
    public String str = "default";

    @Option(name="-str",usage="set a string")
    public void setStr(String str) {
        // NOTE(review): toUpperCase() uses the default locale (e.g. "i" -> "İ"
        // under a Turkish locale); acceptable for this test fixture.
        this.str = str.toUpperCase();
    }
}
114
318
<filename>server/src/core/com/bj58/spat/gaea/server/core/convert/ConvertFacotry.java
/*
 * Copyright Beijing 58 Information Technology Co.,Ltd.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.bj58.spat.gaea.server.core.convert;

import com.bj58.spat.gaea.protocol.sfp.enumeration.SerializeType;
import com.bj58.spat.gaea.protocol.sfp.v1.Protocol;
import com.bj58.spat.gaea.server.contract.log.ILog;
import com.bj58.spat.gaea.server.contract.log.LogFactory;

/**
 * A convert factory for creating converters.
 *
 * NOTE: the historical "Facotry" spelling in the class name is kept because
 * callers elsewhere reference this class by that name; renaming would break
 * the public API.
 *
 * @author Service Platform Architecture Team (<EMAIL>)
 */
public class ConvertFacotry {

	/** Converter for JSON serialization; converters are stateless, so shared instances are safe. */
	private static JsonConvert jsonConvert = new JsonConvert();

	/** Converter for native Java binary serialization. */
	private static JavaConvert javaConvert = new JavaConvert();

	/** Converter for the Gaea binary protocol. */
	private static GaeaBinaryConvert gaeaBinaryConvert = new GaeaBinaryConvert();

	private static ILog logger = LogFactory.getLogger(ConvertFacotry.class);

	/**
	 * Select the converter matching the protocol's serialize type.
	 *
	 * @param p the decoded protocol frame
	 * @return the shared converter instance, or {@code null} when the
	 *         serialize type is not one of GAEABinary, JAVABinary or JSON
	 */
	public static IConvert getConvert(Protocol p) {
		if(p.getSerializeType() == SerializeType.GAEABinary) {
			return gaeaBinaryConvert;
		} else if(p.getSerializeType() == SerializeType.JAVABinary) {
			return javaConvert;
		} else if(p.getSerializeType() == SerializeType.JSON) {
			return jsonConvert;
		}

		// BUG FIX: the old message was garbled and did not say which type failed;
		// include the unsupported serialize type so the log is actionable.
		logger.error("can't get IConvert for serialize type: " + p.getSerializeType()
				+ " (supported: json, java, gaeaBinary)");
		return null;
	}
}
767
650
<filename>core/engine/src/main/java/cn/netdiscovery/core/registry/Registry.java
package cn.netdiscovery.core.registry;

/**
 * Base class for service-registry integrations.  Concrete subclasses
 * implement {@link #register(Provider, int)} for a specific registry backend.
 * <p>
 * Created by tony on 2019-06-08.
 */
public abstract class Registry {

    // Provider exposed via getProvider().
    // NOTE(review): presumably subclasses assign this field inside register() — confirm.
    protected Provider provider;

    /** @return the provider held by this registry (may be null before registration). */
    public Provider getProvider() {
        return provider;
    }

    /**
     * Register the given provider with the backing registry.
     *
     * @param provider the service provider to register
     * @param port     the port the provider listens on
     */
    public abstract void register(Provider provider, int port);
}
118
317
'''
Created on Feb 7, 2012

@author: marat
'''

class PDBAtomParser(object):
    '''
    Parses ATOM or HETATM records from a PDB file.

    It can only properly handle ATOM or HETATM records; before extracting
    individual fields, test the line with :meth:`isAtomType`.  The most
    convenient entry point is :meth:`getDict`, which returns a dict of all
    non-empty fields for a record line.
    '''

    # Column ranges (0-based, half-open slices) of each field in an
    # ATOM/HETATM record, per the PDB fixed-column format.
    irec = dict(record=[0, 6], atomid=[6, 11], name=[12, 16], resname=[17, 20],
                resid=[22, 26], coord=[30, 54], element=[76, 78])
    # Target type used when converting the raw column text.
    atype = dict(record="string", atomid="int", name="string", resname="string",
                 resid="int", coord="float array", element="string")

    def __init__(self):
        '''Constructor. The parser is stateless; all parsing methods are static.'''
        pass

    @staticmethod
    def record(name, buf):
        '''Extract field `name` from record line `buf`.

        Returns the converted value (str, int, or list of floats depending on
        ``atype``), or None when the column range is blank or fails conversion.
        Raises ValueError for an unknown type definition in ``atype``.
        '''
        ir = PDBAtomParser.irec[name]
        atype = PDBAtomParser.atype[name]
        value = buf[ir[0]:ir[1]]
        if value.strip() == '':
            # Blank column range: the field is absent from this record.
            return None
        if atype == 'int':
            try:
                value = int(value)
            except ValueError:
                value = None
        elif atype == 'float':
            try:
                value = float(value)
            except ValueError:
                value = None
        elif atype == 'string':
            pass
        elif atype == 'float array':
            try:
                value = [float(x) for x in value.split()]
            except ValueError:
                value = None
        else:
            raise ValueError("unknown type definition")
        return value

    @staticmethod
    def getDict(buf):
        '''Return a dict of all fields of `buf` that parsed successfully.

        Fields that are blank or unparsable are omitted.
        '''
        d = {}
        # BUG FIX: the original called irec.iterkeys(), which only exists in
        # Python 2; this file already uses Python 3 print(), so use keys().
        for name in PDBAtomParser.irec.keys():
            value = PDBAtomParser.record(name, buf)
            if value is not None:
                d[name] = value
        return d

    @staticmethod
    def isAtomType(buf):
        '''True if `buf` is an atom-based record (ATOM or HETATM).

        BUG FIX: the original returned ``buf[0:4]!="ATOM" or buf[0:6]!="HETATM"``,
        which is True for every input (a string cannot equal both); the intent
        was clearly a positive match on either record name.
        '''
        return buf.startswith("ATOM") or buf.startswith("HETATM")

    def __str__(self):
        # BUG FIX: the original returned self.buf, an attribute that is never
        # set anywhere, so __str__ always raised AttributeError.
        return getattr(self, 'buf', '')


if __name__ == '__main__':
    aline1 = "ATOM 588 1HG GLU 18 -13.363 -4.163 -2.372 1.00 0.00 H"
    aline2 = "ATOM 588 GLU -13.363 -4.163 -2.372 1.00 0.00"
    aline3 = "ATTM 588 GLU -13.363 -4.163 -2.372 1.00 0.00"
    print(PDBAtomParser.record("name", aline2))
    print(PDBAtomParser.record("name", aline1))
    print(PDBAtomParser.getDict(aline1))
    print(PDBAtomParser.getDict(aline2))
    print(PDBAtomParser.getDict(aline3))

# 1
- 6 Record name "ATOM " # 7 - 11 Integer serial Atom serial number. # 13 - 16 Atom name Atom name. # 17 Character altLoc Alternate location indicator. # 18 - 20 Residue name resName Residue name. # 22 Character chainID Chain identifier. # 23 - 26 Integer resSeq Residue sequence number. # 27 AChar iCode Code for insertion of residues. # 31 - 38 Real(8.3) x Orthogonal coordinates for X in # Angstroms # 39 - 46 Real(8.3) y Orthogonal coordinates for Y in # Angstroms # 47 - 54 Real(8.3) z Orthogonal coordinates for Z in # Angstroms # 55 - 60 Real(6.2) occupancy Occupancy. # 61 - 66 Real(6.2) tempFactor Temperature factor. # 77 - 78 LString(2) element Element symbol, right-justified. # 79 - 80 LString(2) charge Charge on the atom. # Example # 1 2 3 4 5 6 7 8 # 12345678901234567890123456789012345678901234567890123456789012345678901234567890 # MODEL 1 # ATOM 1 N ALA 1 11.104 6.134 -6.504 1.00 0.00 N # ATOM 2 CA ALA 1 11.639 6.071 -5.147 1.00 0.00 C # ... # ... # ATOM 293 1HG GLU 18 -14.861 -4.847 0.361 1.00 0.00 H # ATOM 294 2HG GLU 18 -13.518 -3.769 0.084 1.00 0.00 H
2,580
304
{ "name": "slither-server", "description": "Open source slither server implementation", "version": "0.0.12-dev", "main": "./index.js", "bin": { "slither": "./bin/slither" }, "repository": { "type": "git", "url": "https://github.com/iiegor/slither" }, "license": "MIT", "dependencies": { "chalk": "^1.1.3", "coffee-script": "^1.10.0", "object-keys": "^1.0.9", "semver": "^5.1.0", "ws": "^0.8.1" }, "packageDependencies": {}, "scripts": { "start": "node ./bin/slither", "start:prod": "npm run compile && node ./bin/slither --prod", "compile": "coffee --output lib --compile src" }, "private": true }
304
3,227
<reponame>ffteja/cgal
namespace CGAL {

/*!
\addtogroup PkgHandlesAndCirculatorsTags

Iterators and circulators as well as different categories of
circulators can be distinguished with the use of discriminating
functions and the following circulator tags. A couple of base classes
simplify the task of writing own circulators. They declare the
appropriate tags and the local types needed for circulators. To use
the tags or base classes only it is sufficient to include:

\sa `query_circulator_or_iterator`
\sa `Circulator_traits`
\sa `Assert_circulator`
\sa `CGAL_For_all`
\sa `is_empty_range`
\sa `Circulator`

\cgalHeading{Example}

The above declarations can be used to distinguish between iterators
and circulators and between different circulator categories. The
assertions can be used to protect a templatized algorithm against
instantiations that do not fulfill the requirements. The following
example program illustrates both.

\cgalExample{Circulator/circulator_prog3.cpp}

*/

/*!
\addtogroup PkgHandlesAndCirculatorsBaseClasses

\cgalHeading{Implementation}

Since not all current compilers can eliminate the space needed for the
compile time tags even when deriving from them, we implement a variant
for each base class that contains a protected `void*` data member
called `_ptr`. Here, the allocated space in the derived classes can be
reused.

*/

/*!
\ingroup PkgHandlesAndCirculatorsTags
A tag for any circulator type.
*/
struct Circulator_tag {};

/*!
\ingroup PkgHandlesAndCirculatorsTags
A tag for any iterator type.
*/
struct Iterator_tag {};

/*!
\ingroup PkgHandlesAndCirculatorsTags
*/
// FIX: the standard library has no std::forward_circulator_tag; the
// circulator tag derives from the corresponding std iterator tag.
struct Forward_circulator_tag : public virtual std::forward_iterator_tag {};

/*!
\ingroup PkgHandlesAndCirculatorsTags
*/
struct Bidirectional_circulator_tag : public virtual std::bidirectional_iterator_tag {};

/*!
\ingroup PkgHandlesAndCirculatorsTags
*/
// FIX: the standard library has no std::random_access_circulator_tag;
// derive from std::random_access_iterator_tag instead.
struct Random_access_circulator_tag : public virtual std::random_access_iterator_tag {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
*/
template < class Category,
           class T,
           class Dist = std::ptrdiff_t,
           class Size = std::size_t,
           class Ptr = T*,
           class Ref = T& >
struct Circulator_base {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
*/
template <class T, class Dist, class Size>
struct Forward_circulator_base {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
*/
template <class T, class Dist, class Size>
struct Bidirectional_circulator_base {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
*/
template <class T, class Dist, class Size>
struct Random_access_circulator_base {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
forward circulator.
*/
template <class T, class Dist, class Size>
class Forward_circulator_ptrbase {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
bidirectional circulator.
*/
template <class T, class Dist, class Size>
class Bidirectional_circulator_ptrbase {};

/*!
\ingroup PkgHandlesAndCirculatorsBaseClasses
random access circulator.
*/
template <class T, class Dist, class Size>
class Random_access_circulator_ptrbase {};

} /* end namespace CGAL */
922
6,224
/* * Copyright (c) 2021 Microchip Technology Inc. and its subsidiaries. * * SPDX-License-Identifier: Apache-2.0 */ #ifndef _MEC172X_I2C_SMB_H #define _MEC172X_I2C_SMB_H #include <stdint.h> #include <stddef.h> /* Version 3.7 MCHP I2C/SMBus Controller specification */ #define MCHP_I2C_BAUD_CLK_HZ 16000000u #define MCHP_I2C_SMB_INST_SPACING 0x400u #define MCHP_I2C_SMB_INST_SPACING_P2 10u #define MCHP_I2C_SMB0_BASE_ADDR 0x40004000u #define MCHP_I2C_SMB1_BASE_ADDR 0x40004400u #define MCHP_I2C_SMB2_BASE_ADDR 0x40004800u #define MCHP_I2C_SMB3_BASE_ADDR 0x40004c00u #define MCHP_I2C_SMB4_BASE_ADDR 0x40005000u /* 0 <= n < MCHP_I2C_SMB_MAX_INSTANCES */ #define MCHP_I2C_SMB_BASE_ADDR(n) \ ((MCHP_I2C_SMB0_BASE_ADDR) + \ ((uint32_t)(n) * (MCHP_I2C_SMB_INST_SPACING))) /* * Offset 0x00 * Control and Status register * Write to Control * Read from Status * Size 8-bit */ #define MCHP_I2C_SMB_CTRL_OFS 0x00u #define MCHP_I2C_SMB_CTRL_MASK 0xcfu #define MCHP_I2C_SMB_CTRL_ACK BIT(0) #define MCHP_I2C_SMB_CTRL_STO BIT(1) #define MCHP_I2C_SMB_CTRL_STA BIT(2) #define MCHP_I2C_SMB_CTRL_ENI BIT(3) /* bits [5:4] reserved */ #define MCHP_I2C_SMB_CTRL_ESO BIT(6) #define MCHP_I2C_SMB_CTRL_PIN BIT(7) /* Status Read-only */ #define MCHP_I2C_SMB_STS_OFS 0x00u #define MCHP_I2C_SMB_STS_NBB BIT(0) #define MCHP_I2C_SMB_STS_LAB BIT(1) #define MCHP_I2C_SMB_STS_AAS BIT(2) #define MCHP_I2C_SMB_STS_LRB_AD0 BIT(3) #define MCHP_I2C_SMB_STS_BER BIT(4) #define MCHP_I2C_SMB_STS_EXT_STOP BIT(5) #define MCHP_I2C_SMB_STS_SAD BIT(6) #define MCHP_I2C_SMB_STS_PIN BIT(7) /* * Offset 0x04 * Own Address b[7:0] = Slave address 1 * b[14:8] = Slave address 2 */ #define MCHP_I2C_SMB_OWN_ADDR_OFS 0x04u #define MCHP_I2C_SMB_OWN_ADDR2_OFS 0x05u #define MCHP_I2C_SMB_OWN_ADDR_MASK 0x7f7fu /* * Offset 0x08 * Data register, 8-bit * Data to be shifted out or shifted in. 
*/ #define MCHP_I2C_SMB_DATA_OFS 0x08u /* Offset 0x0C Leader Command register */ #define MCHP_I2C_SMB_MSTR_CMD_OFS 0x0cu #define MCHP_I2C_SMB_MSTR_CMD_RD_CNT_OFS 0x0fu /* byte 3 */ #define MCHP_I2C_SMB_MSTR_CMD_WR_CNT_OFS 0x0eu /* byte 2 */ #define MCHP_I2C_SMB_MSTR_CMD_OP_OFS 0x0du /* byte 1 */ #define MCHP_I2C_SMB_MSTR_CMD_M_OFS 0x0cu /* byte 0 */ #define MCHP_I2C_SMB_MSTR_CMD_MASK 0xffff3ff3u /* 32-bit definitions */ #define MCHP_I2C_SMB_MSTR_CMD_MRUN BIT(0) #define MCHP_I2C_SMB_MSTR_CMD_MPROCEED BIT(1) #define MCHP_I2C_SMB_MSTR_CMD_START0 BIT(8) #define MCHP_I2C_SMB_MSTR_CMD_STARTN BIT(9) #define MCHP_I2C_SMB_MSTR_CMD_STOP BIT(10) #define MCHP_I2C_SMB_MSTR_CMD_PEC_TERM BIT(11) #define MCHP_I2C_SMB_MSTR_CMD_READM BIT(12) #define MCHP_I2C_SMB_MSTR_CMD_READ_PEC BIT(13) #define MCHP_I2C_SMB_MSTR_CMD_RD_CNT_POS 24u #define MCHP_I2C_SMB_MSTR_CMD_WR_CNT_POS 16u /* byte 0 definitions */ #define MCHP_I2C_SMB_MSTR_CMD_B0_MRUN BIT(0) #define MCHP_I2C_SMB_MSTR_CMD_B0_MPROCEED BIT(1) /* byte 1 definitions */ #define MCHP_I2C_SMB_MSTR_CMD_B1_START0 BIT((8 - 8)) #define MCHP_I2C_SMB_MSTR_CMD_B1_STARTN BIT((9 - 8)) #define MCHP_I2C_SMB_MSTR_CMD_B1_STOP BIT((10 - 8)) #define MCHP_I2C_SMB_MSTR_CMD_B1_PEC_TERM BIT((11 - 8)) #define MCHP_I2C_SMB_MSTR_CMD_B1_READM BIT((12 - 8)) #define MCHP_I2C_SMB_MSTR_CMD_B1_READ_PEC BIT((13 - 8)) /* Offset 0x10 Follower Command register */ #define MCHP_I2C_SMB_SLV_CMD_OFS 0x10u #define MCHP_I2C_SMB_SLV_CMD_MASK 0x00ffff07u #define MCHP_I2C_SMB_SLV_CMD_SRUN BIT(0) #define MCHP_I2C_SMB_SLV_CMD_SPROCEED BIT(1) #define MCHP_I2C_SMB_SLV_CMD_SEND_PEC BIT(2) #define MCHP_I2C_SMB_SLV_WR_CNT_POS 8u #define MCHP_I2C_SMB_SLV_RD_CNT_POS 16u /* Offset 0x14 PEC CRC register, 8-bit read-write */ #define MCHP_I2C_SMB_PEC_CRC_OFS 0x14u /* Offset 0x18 Repeated Start Hold Time register, 8-bit read-write */ #define MCHP_I2C_SMB_RSHT_OFS 0x18u /* Offset 0x20 Completion register, 32-bit */ #define MCHP_I2C_SMB_CMPL_OFS 0x20u #define MCHP_I2C_SMB_CMPL_MASK 0xe33b7f7Cu 
#define MCHP_I2C_SMB_CMPL_RW1C_MASK 0xe1397f00u #define MCHP_I2C_SMB_CMPL_DTEN BIT(2) #define MCHP_I2C_SMB_CMPL_MCEN BIT(3) #define MCHP_I2C_SMB_CMPL_SCEN BIT(4) #define MCHP_I2C_SMB_CMPL_BIDEN BIT(5) #define MCHP_I2C_SMB_CMPL_TIMERR BIT(6) #define MCHP_I2C_SMB_CMPL_DTO_RWC BIT(8) #define MCHP_I2C_SMB_CMPL_MCTO_RWC BIT(9) #define MCHP_I2C_SMB_CMPL_SCTO_RWC BIT(10) #define MCHP_I2C_SMB_CMPL_CHDL_RWC BIT(11) #define MCHP_I2C_SMB_CMPL_CHDH_RWC BIT(12) #define MCHP_I2C_SMB_CMPL_BER_RWC BIT(13) #define MCHP_I2C_SMB_CMPL_LAB_RWC BIT(14) #define MCHP_I2C_SMB_CMPL_SNAKR_RWC BIT(16) #define MCHP_I2C_SMB_CMPL_STR_RO BIT(17) #define MCHP_I2C_SMB_CMPL_SPROT_RWC BIT(19) #define MCHP_I2C_SMB_CMPL_RPT_RD_RWC BIT(20) #define MCHP_I2C_SMB_CMPL_RPT_WR_RWC BIT(21) #define MCHP_I2C_SMB_CMPL_MNAKX_RWC BIT(24) #define MCHP_I2C_SMB_CMPL_MTR_RO BIT(25) #define MCHP_I2C_SMB_CMPL_IDLE_RWC BIT(29) #define MCHP_I2C_SMB_CMPL_MDONE_RWC BIT(30) #define MCHP_I2C_SMB_CMPL_SDONE_RWC BIT(31) /* Offset 0x24 Idle Scaling register */ #define MCHP_I2C_SMB_IDLSC_OFS 0x24u #define MCHP_I2C_SMB_IDLSC_DLY_OFS 0x24u #define MCHP_I2C_SMB_IDLSC_BUS_OFS 0x26u #define MCHP_I2C_SMB_IDLSC_MASK 0x0fff0fffu #define MCHP_I2C_SMB_IDLSC_BUS_MIN_POS 0u #define MCHP_I2C_SMB_IDLSC_DLY_POS 16u /* Offset 0x28 Configuration register */ #define MCHP_I2C_SMB_CFG_OFS 0x28u #define MCHP_I2C_SMB_CFG_MASK 0xf00f5Fbfu #define MCHP_I2C_SMB_CFG_PORT_SEL_POS 0 #define MCHP_I2C_SMB_CFG_PORT_SEL_MASK 0x0fu #define MCHP_I2C_SMB_CFG_TCEN BIT(4) #define MCHP_I2C_SMB_CFG_SLOW_CLK BIT(5) #define MCHP_I2C_SMB_CFG_PCEN BIT(7) #define MCHP_I2C_SMB_CFG_FEN BIT(8) #define MCHP_I2C_SMB_CFG_RESET BIT(9) #define MCHP_I2C_SMB_CFG_ENAB BIT(10) #define MCHP_I2C_SMB_CFG_DSA BIT(11) #define MCHP_I2C_SMB_CFG_FAIR BIT(12) #define MCHP_I2C_SMB_CFG_GC_DIS BIT(14) #define MCHP_I2C_SMB_CFG_FLUSH_SXBUF_WO BIT(16) #define MCHP_I2C_SMB_CFG_FLUSH_SRBUF_WO BIT(17) #define MCHP_I2C_SMB_CFG_FLUSH_MXBUF_WO BIT(18) #define MCHP_I2C_SMB_CFG_FLUSH_MRBUF_WO BIT(19) 
#define MCHP_I2C_SMB_CFG_EN_AAS BIT(28) #define MCHP_I2C_SMB_CFG_ENIDI BIT(29) #define MCHP_I2C_SMB_CFG_ENMI BIT(30) #define MCHP_I2C_SMB_CFG_ENSI BIT(31) /* Offset 0x2C Bus Clock register */ #define MCHP_I2C_SMB_BUS_CLK_OFS 0x2cu #define MCHP_I2C_SMB_BUS_CLK_MASK 0x0000ffffu #define MCHP_I2C_SMB_BUS_CLK_LO_POS 0u #define MCHP_I2C_SMB_BUS_CLK_HI_POS 8u /* Offset 0x30 Block ID register, 8-bit read-only */ #define MCHP_I2C_SMB_BLOCK_ID_OFS 0x30u #define MCHP_I2C_SMB_BLOCK_ID_MASK 0xffu /* Offset 0x34 Block Revision register, 8-bit read-only */ #define MCHP_I2C_SMB_BLOCK_REV_OFS 0x34u #define MCHP_I2C_SMB_BLOCK_REV_MASK 0xffu /* Offset 0x38 Bit-Bang Control register, 8-bit read-write */ #define MCHP_I2C_SMB_BB_OFS 0x38u #define MCHP_I2C_SMB_BB_MASK 0x7fu #define MCHP_I2C_SMB_BB_EN BIT(0) #define MCHP_I2C_SMB_BB_SCL_DIR_IN 0 #define MCHP_I2C_SMB_BB_SCL_DIR_OUT BIT(1) #define MCHP_I2C_SMB_BB_SDA_DIR_IN 0 #define MCHP_I2C_SMB_BB_SDA_DIR_OUT BIT(2) #define MCHP_I2C_SMB_BB_CL BIT(3) #define MCHP_I2C_SMB_BB_DAT BIT(4) #define MCHP_I2C_SMB_BB_IN_POS 5u #define MCHP_I2C_SMB_BB_IN_MASK0 0x03u #define MCHP_I2C_SMB_BB_IN_MASK SHLU32(0x03, 5) #define MCHP_I2C_SMB_BB_CLKI_RO BIT(5) #define MCHP_I2C_SMB_BB_DATI_RO BIT(6) /* Offset 0x40 Data Timing register */ #define MCHP_I2C_SMB_DATA_TM_OFS 0x40u #define MCHP_I2C_SMB_DATA_TM_MASK GENMASK(31, 0) #define MCHP_I2C_SMB_DATA_TM_DATA_HOLD_POS 0u #define MCHP_I2C_SMB_DATA_TM_DATA_HOLD_MASK 0xffu #define MCHP_I2C_SMB_DATA_TM_DATA_HOLD_MASK0 0xffu #define MCHP_I2C_SMB_DATA_TM_RESTART_POS 8u #define MCHP_I2C_SMB_DATA_TM_RESTART_MASK 0xff00u #define MCHP_I2C_SMB_DATA_TM_RESTART_MASK0 0xffu #define MCHP_I2C_SMB_DATA_TM_STOP_POS 16u #define MCHP_I2C_SMB_DATA_TM_STOP_MASK 0xff0000u #define MCHP_I2C_SMB_DATA_TM_STOP_MASK0 0xffu #define MCHP_I2C_SMB_DATA_TM_FSTART_POS 24u #define MCHP_I2C_SMB_DATA_TM_FSTART_MASK 0xff000000u #define MCHP_I2C_SMB_DATA_TM_FSTART_MASK0 0xffu /* Offset 0x44 Time-out Scaling register */ #define MCHP_I2C_SMB_TMTSC_OFS 
0x44u #define MCHP_I2C_SMB_TMTSC_MASK GENMASK(31, 0) #define MCHP_I2C_SMB_TMTSC_CLK_HI_POS 0u #define MCHP_I2C_SMB_TMTSC_CLK_HI_MASK 0xffu #define MCHP_I2C_SMB_TMTSC_CLK_HI_MASK0 0xffu #define MCHP_I2C_SMB_TMTSC_SLV_POS 8u #define MCHP_I2C_SMB_TMTSC_SLV_MASK 0xff00u #define MCHP_I2C_SMB_TMTSC_SLV_MASK0 0xffu #define MCHP_I2C_SMB_TMTSC_MSTR_POS 16u #define MCHP_I2C_SMB_TMTSC_MSTR_MASK 0xff0000u #define MCHP_I2C_SMB_TMTSC_MSTR_MASK0 0xffu #define MCHP_I2C_SMB_TMTSC_BUS_POS 24u #define MCHP_I2C_SMB_TMTSC_BUS_MASK 0xff000000u #define MCHP_I2C_SMB_TMTSC_BUS_MASK0 0xffu /* Offset 0x48 Follower Transmit Buffer register 8-bit read-write */ #define MCHP_I2C_SMB_SLV_TX_BUF_OFS 0x48u /* Offset 0x4C Follower Receive Buffer register 8-bit read-write */ #define MCHP_I2C_SMB_SLV_RX_BUF_OFS 0x4cu /* Offset 0x50 Leader Transmit Buffer register 8-bit read-write */ #define MCHP_I2C_SMB_MTR_TX_BUF_OFS 0x50u /* Offset 0x54 Leader Receive Buffer register 8-bit read-write */ #define MCHP_I2C_SMB_MTR_RX_BUF_OFS 0x54u /* Offset 0x58 I2C FSM read-only */ #define MCHP_I2C_SMB_I2C_FSM_OFS 0x58u /* Offset 0x5C SMB Netork layer FSM read-only */ #define MCHP_I2C_SMB_FSM_OFS 0x5cu /* Offset 0x60 Wake Status register */ #define MCHP_I2C_SMB_WAKE_STS_OFS 0x60u #define MCHP_I2C_SMB_WAKE_STS_START_RWC BIT(0) /* Offset 0x64 Wake Enable register */ #define MCHP_I2C_SMB_WAKE_EN_OFS 0x64u #define MCHP_I2C_SMB_WAKE_EN BIT(0) /* Offset 0x68 */ #define MCHP_I2C_SMB_WAKE_SYNC_OFS 0x68u #define MCHP_I2C_SMB_WAKE_FAST_RESYNC_EN BIT(0) /** @brief I2C-SMBus with network layer registers. 
*/ struct i2c_smb_regs { volatile uint8_t CTRLSTS; uint8_t RSVD1[3]; volatile uint32_t OWN_ADDR; volatile uint8_t I2CDATA; uint8_t RSVD2[3]; volatile uint32_t MCMD; volatile uint32_t SCMD; volatile uint8_t PEC; uint8_t RSVD3[3]; volatile uint32_t RSHTM; volatile uint32_t EXTLEN; volatile uint32_t COMPL; volatile uint32_t IDLSC; volatile uint32_t CFG; volatile uint32_t BUSCLK; volatile uint32_t BLKID; volatile uint32_t BLKREV; volatile uint8_t BBCTRL; uint8_t RSVD7[3]; volatile uint32_t CLKSYNC; volatile uint32_t DATATM; volatile uint32_t TMOUTSC; volatile uint8_t SLV_TXB; uint8_t RSVD8[3]; volatile uint8_t SLV_RXB; uint8_t RSVD9[3]; volatile uint8_t MTR_TXB; uint8_t RSVD10[3]; volatile uint8_t MTR_RXB; uint8_t RSVD11[3]; volatile uint32_t FSM; volatile uint32_t FSM_SMB; volatile uint8_t WAKE_STS; uint8_t RSVD12[3]; volatile uint8_t WAKE_EN; uint32_t RSVD13[2]; volatile uint32_t PROM_ISTS; volatile uint32_t PROM_IEN; volatile uint32_t PROM_CTRL; volatile uint32_t SHADOW_DATA; }; /* Size = 128(0x80) */ #endif /* #ifndef _MEC172X_I2C_SMB_H */
6,116
362
// Copyright (c) 2018-2020, <NAME>. For more information see 'LICENSE'

#pragma once

#include "framegraph/Public/EResourceState.h"
#include "framegraph/Shared/ResourceDataRange.h"
#include "VBuffer.h"

namespace FG
{

	//
	// Vulkan Buffer thread local
	//
	// Frame-local view over an immutable VBuffer: records pending and
	// committed read/write accesses on byte ranges of the buffer so that
	// pipeline barriers can be generated when accesses are committed.

	class VLocalBuffer final
	{
		friend class VBufferUnitTest;

	// types
	public:
		// Byte range inside the buffer, expressed in VkDeviceSize units.
		using BufferRange = ResourceDataRange< VkDeviceSize >;

		// A requested (not yet committed) access to a byte range of the buffer.
		struct BufferState
		{
		// variables
			EResourceState	state	= Default;	// how the task will use the range
			BufferRange		range;				// affected bytes
			VTask			task;				// task that performs the access

		// methods
			BufferState () {}

			BufferState (EResourceState state, VkDeviceSize begin, VkDeviceSize end, VTask task) :
				state{state}, range{begin, end}, task{task} {}
		};

	private:
		// A recorded access used for barrier generation.
		struct BufferAccess
		{
		// variables
			BufferRange				range;
			VkPipelineStageFlagBits	stages		= Zero;						// pipeline stages touching the range
			VkAccessFlagBits		access		= Zero;						// Vulkan access mask
			ExeOrderIndex			index		= ExeOrderIndex::Initial;	// execution order of the accessing task
			bool					isReadable : 1;
			bool					isWritable : 1;

		// methods
			BufferAccess () : isReadable{false}, isWritable{false} {}
		};

		using AccessRecords_t	= Array< BufferAccess >;	// TODO: fixed size array or custom allocator
		using AccessIter_t		= AccessRecords_t::iterator;


	// variables
	private:
		Ptr<VBuffer const>		_bufferData;	// readonly access is thread safe

		// mutable: access bookkeeping is updated from const methods during recording
		mutable AccessRecords_t	_pendingAccesses;
		mutable AccessRecords_t	_accessForWrite;
		mutable AccessRecords_t	_accessForRead;
		mutable bool			_isImmutable	= false;	// when set, state transitions are suppressed


	// methods
	public:
		VLocalBuffer () {}
		VLocalBuffer (VLocalBuffer &&) = delete;
		VLocalBuffer (const VLocalBuffer &) = delete;
		~VLocalBuffer ();

		// Bind this local view to a global buffer; returns false on failure.
		bool Create (const VBuffer *);
		void Destroy ();

		void SetInitialState (bool immutable) const;

		// Queue a state transition; barriers are emitted later by CommitBarrier().
		void AddPendingState (const BufferState &state) const;

		void ResetState (ExeOrderIndex index, VBarrierManager &barrierMngr, Ptr<VLocalDebugger> debugger) const;

		// Emit the pipeline barriers implied by the pending accesses.
		void CommitBarrier (VBarrierManager &barrierMngr, Ptr<VLocalDebugger> debugger) const;

		ND_ bool				IsCreated ()	const	{ return _bufferData != null; }
		ND_ VkBuffer			Handle ()		const	{ return _bufferData->Handle(); }
		ND_ VBuffer const*		ToGlobal ()		const	{ return _bufferData.get(); }

		ND_ BufferDesc const&	Description ()	const	{ return _bufferData->Description(); }
		ND_ BytesU				Size ()			const	{ return Description().size; }
		ND_ StringView			GetDebugName ()	const	{ return _bufferData->GetDebugName(); }

	private:
		// Find the first access record in 'arr' relevant to 'range'.
		// NOTE(review): exact overlap semantics live in the .cpp — confirm there.
		ND_ static AccessIter_t	_FindFirstAccess (AccessRecords_t &arr, const BufferRange &range);

		// Replace records starting at 'iter' with 'barrier' (in-out array).
		static void				_ReplaceAccessRecords (INOUT AccessRecords_t &arr, AccessIter_t iter, const BufferAccess &barrier);

		// Erase records covered by 'range'; returns iterator past the erased region.
		static AccessIter_t		_EraseAccessRecords (INOUT AccessRecords_t &arr, AccessIter_t iter, const BufferRange &range);
	};


}	// FG
1,016
451
#include "include/MMVII_all.h" /* Caracteristiques envisagees : * L1 sur tout pixel * L1 pondere par 1/Scale sur tout pixel * L1 sur le + petit rayon * census (pondere ?) * correlation (pondere ? 1 pixel? 2 pixel ? ...) * gradient de L1 sur paralaxe (mesure ambiguite residuelle) * mesure sur gradient (rho ? theta ? ...) */ namespace MMVII { bool TESTPT(const cPt2dr & aPt,int aLine,const std::string& aFile); #define TPT(AP) TESTPT(AP,__LINE__,__FILE__) void AddData(const cAuxAr2007 & anAux,eModeCaracMatch & aMCM); typedef std::vector<eModeCaracMatch> tVecCar; std::string NameVecCar(const tVecCar &); class cComputeSeparDist { public : cComputeSeparDist(); void AddPops(double aPopA,double aPopB); double Sep() const; // Something like mSomSep / mSomP private : double mSomSep; ///< S(AB/(A+B)) double mSomP; ///< S(A+B) }; template <class Type> double ComputeSep(const Type * aD1,const Type * aD2,int aNb); template <class Type,int Dim> double ComputeSep(const cDataTypedIm<Type,Dim> &,const cDataTypedIm<Type,Dim> &); extern bool DEBUG_LM; class cAppliLearningMatch : public cMMVII_Appli { public : const int & NbOct() const; const int & NbLevByOct() const; const int & NbOverLapByO() const; static const int SzMaxStdNeigh() {return 8;} protected : cAppliLearningMatch(const std::vector<std::string> & aVArgs,const cSpecMMVII_Appli & aSpec); void SetNamesProject (const std::string & aNameInput,const std::string & aNameOutput) ; std::string Prefix(bool isIn) const; static std::string Post(bool isXml) ; std::string DirVisu() const; std::string DirResult() const; std::string SubDirResult(bool isIn) const; std::string FileHisto1Carac(bool isIn,bool isXml=false) const ; std::string NameReport() const; std::string FileHistoNDIm(const std::string &,bool IsIn) const; static std::string PrefixAll(); static std::string Im1(); static std::string Im2(); static std::string Px1(); static std::string Px2(); static std::string Masq1(); static std::string Masq2(); static bool IsFromType(const 
std::string & aName,const std::string & aPost); static bool IsIm1(const std::string & aName); static bool IsIm2(const std::string & aName); static bool Im1OrIm2(const std::string & aName); // Generate an error if none static std::string MakeName(const std::string & aName,const std::string & aPref) ; static void GenConvertIm(const std::string & aInput, const std::string & aOutput); static std::string NameIm1(const std::string & aName); static std::string NameIm2(const std::string & aName); static std::string NamePx1(const std::string & aName); static std::string NamePx2(const std::string & aName); static std::string NameMasq1(const std::string & aName); static std::string NameMasq2(const std::string & aName); static std::string NameRedrIm1(const std::string & aName); static std::string NameRedrIm2(const std::string & aName); static void ConvertIm1(const std::string & aInput,const std::string & aName); static void ConvertIm2(const std::string & aInput,const std::string & aName); static std::string Im2FromIm1(const std::string & aIm1); static std::string Px1FromIm1(const std::string & aIm1); static std::string Masq1FromIm1(const std::string & aIm1); static std::string Px2FromIm2(const std::string & aIm2); static std::string Masq2FromIm2(const std::string & aIm2); static std::string PxFromIm(const std::string & aIm12); static std::string MasqFromIm(const std::string & aIm12); // static std::string Ext(bool isXml); static std::string PrefixHom(); static std::string Hom(int aNum); static std::string Index(int aNum); static std::string HomFromIm1(const std::string & aIm1,int aNumHom,std::string anExt,bool isXml=false); static std::string HomFromHom0(const std::string & aName,int aNumHom); private : std::string mNameInput; std::string mNameOutput; int mNbOct; // 3 octave for window , maybe add 2 learning multiscale int mNbLevByOct; // more or less minimalist int mNbOverLapByO; // 1 overlap is required for junction at decimation }; class cPyr1ImLearnMatch : public cMemCheck 
{ public : typedef cGaussianPyramid<tREAL4> tPyr; typedef std::shared_ptr<tPyr> tSP_Pyr; typedef cIm2D<tREAL4> tImFiltred; typedef cDataIm2D<tREAL4> tDataImF; cPyr1ImLearnMatch ( const cBox2di & aBox, const cBox2di & aBoxOut, // Required by pyram but apparently not used const std::string & aName, const cAppliLearningMatch &, const cFilterPCar&, bool initRand ); cPyr1ImLearnMatch(const cPyr1ImLearnMatch &) = delete; void SaveImFiltered() const; bool CalculAimeDesc(const cPt2dr & aPt); double MulScale() const; const tDataImF & ImInit() const; const tDataImF & ImFiltered() const; cAimePCar DupLPIm() const; // ~cPyr1ImLearnMatch(); private : cBox2di mBox; std::string mNameIm; const cAppliLearningMatch & mAppli; cGP_Params mGP; tSP_Pyr mPyr; tImFiltred mImF; cAimePCar mPC; }; class cVecCaracMatch : public cMemCheck { public : typedef cDataIm2D<tREAL4> tDataIm; static constexpr int TheDyn4Save = 20000; static constexpr int TheDyn4Visu = 1000; static int ToVisu(int aVal) {return std::min(TheDyn4Visu-1,(aVal*TheDyn4Visu)/TheDyn4Save);} static int FromVisu(int aVal) {return std::min(TheDyn4Save-1,(aVal*TheDyn4Save)/TheDyn4Visu);} static cPt2di ToVisu(const cPt2di & aPt) {return cPt2di(ToVisu(aPt.x()),ToVisu(aPt.y()));} static constexpr int TheUnDefVal = TheDyn4Save +1; static constexpr int TheNbVals = int (eModeCaracMatch::eNbVals); typedef tU_INT2 tSaveValues; void SetValue(eModeCaracMatch aCarac,const float & aVal); const tSaveValues & Value(eModeCaracMatch aCarac) const ; cVecCaracMatch ( float aScaleRho, const tDataIm & aImInit1,const tDataIm & aImInit2, const tDataIm & aImNorm1,const tDataIm & aImNorm2, const cAimePCar &,const cAimePCar & ); cVecCaracMatch ( const cPyr1ImLearnMatch & aPyr1,const cPyr1ImLearnMatch & aPyr2, const cAimePCar &,const cAimePCar & ); /* cVecCaracMatch ( float aScaleRho,float aGrayLev1,float aGrayLev2, const cAimePCar &,const cAimePCar & ); */ cVecCaracMatch() ; void AddData(const cAuxAr2007 & anAux); void Show(tNameSelector); void 
FillVect(cDenseVect<tINT4> &,const tVecCar &) const; private : tSaveValues mVecCarac[TheNbVals]; }; void AddData(const cAuxAr2007 & anAux, cVecCaracMatch & aVCM); class cFileVecCaracMatch : public cMemCheck { public : cFileVecCaracMatch(const cFilterPCar &,int aNb); cFileVecCaracMatch(const std::string &); ///< From file void AddCarac(const cVecCaracMatch &); void AssertCompatible(const cFileVecCaracMatch &); void AddData(const cAuxAr2007 & anAux); const std::vector<cVecCaracMatch> & VVCM() const; private : int mNbVal; cFilterPCar mFPC; std::vector<cVecCaracMatch> mVVCM; std::string mCheckRW; // to check read/write works }; void AddData(const cAuxAr2007 & anAux, cFileVecCaracMatch & aVCM); class cStatOneVecCarac : public cMemCheck { public : typedef cHistoCumul<tINT4,tREAL8> tHisto; static constexpr int TheDyn4Save = cVecCaracMatch::TheDyn4Save; static constexpr int TheDyn4Visu = cVecCaracMatch::TheDyn4Visu; static constexpr int TheNbH = 3; cStatOneVecCarac(const cPt2di & aSzCr = cPt2di(1,1)); void Add(int aNum,int aVal) { Hist(aNum).AddV(aVal,1); } double Separ(int aN1,int aN2) const; // Compute separability betwen Hist[N1] and Hist[N2] void Inspect(const cStatOneVecCarac &); cDataIm2D<tINT4> & ImCr(bool Close); const cDataIm2D<tINT4> & ImCr(bool Close) const; double FiabCr(bool Close) const; void SaveCr(int aDeZoom,bool isClose,const std::string &); void SaveHisto(int aSz,const std::string &); // Reduce size of mImCr01, wich are note usefull for saving void PackForSave(); void MakeCumul(); void AddData(const cAuxAr2007 & anAux); tHisto & Hist(int aNum); const tHisto & Hist(int aNum) const; const tHisto & HistSom(int aFlag) const; private : tHisto mHist[TheNbH]; mutable tHisto mHistSom; cIm2D<tINT4> mImCr01; // Contain stat of Hom/CloseHom cIm2D<tINT4> mImCr02; // Contain stat of Hom/NonHom }; void AddData(const cAuxAr2007 & anAux,cStatOneVecCarac&); class cStatAllVecCarac : public cMemCheck { public : static constexpr int TheNbVals = int 
(eModeCaracMatch::eNbVals); static constexpr int TheDyn4Save = cVecCaracMatch::TheDyn4Save; static constexpr int TheDyn4Visu = cVecCaracMatch::TheDyn4Visu; cStatAllVecCarac(bool WithCrois); void AddOneFile(int aNum,const cFileVecCaracMatch &); void AddCr(const cFileVecCaracMatch &,const cFileVecCaracMatch &,bool isClose); void ShowSepar(const std::string & aPat,cMultipleOfs &); void Inspect(); void SaveCr(int aDeZoom,const std::string &aDir); void SaveHisto(int aSz,const std::string &aDir); void PackForSave(); // Supress Cr to reduce size void AddData(const cAuxAr2007 & anAux); void MakeCumul(); const cStatOneVecCarac & OneStat(eModeCaracMatch) const; private : bool mWithCr; cPt2di mSzCr; std::vector<cStatOneVecCarac> mStats; }; void AddData(const cAuxAr2007 & anAux,cStatAllVecCarac&); // Class to represent statistic of conbination of criterion, // memorize the histogramm of Hom & Nom Hom, for each dimension the value // are resampled according to some histogramm equalization // class cHistoCarNDim : public cMemCheck { public : typedef tINT4 tDataNd; typedef cDataGenDimTypedIm<tDataNd> tHistND; typedef cHistoCumul<tINT4,tREAL8> tHisto1; typedef cDenseVect<tINT4> tIndex; typedef cDenseVect<tREAL4> tRIndex; cHistoCarNDim(int aSzH,const tVecCar &,const cStatAllVecCarac &,bool genVis2DI); cHistoCarNDim(); // Used for AddData requiring default cstrc cHistoCarNDim(const std::string&); // Used for AddData requiring default cstrc ~cHistoCarNDim(); // Used for AddData requiring default cstrc void Add(const cVecCaracMatch &,bool isH0); void Show(cMultipleOfs &,bool WithCr) const; void AddData(const cAuxAr2007 &); const std::string & Name() const; /* Score Cr/CarSep CarSep : separation between the 2 dist using cComputeSeparDist HomologyLikelihood(V) : Likelihood that a given vector is homolog UpDateCorrectness(Hom,NHom) : update the the proba that HomologyLikelihood(Hom) > HomologyLikelihood(NonHom) Correctness() : global proba of HomologyLikelihood(Hom) > 
HomologyLikelihood(NonHom) */ double CarSep() const; double Correctness() const; double HomologyLikelihood(const cVecCaracMatch &,bool Interpol) const; void UpDateCorrectness(const cVecCaracMatch & aHom,const cVecCaracMatch & aNotHom); // Generate 4 Visu : // Hom, Non Hom, Score, Pop void GenerateVisu(const std::string & aDir); // Generate 2 Visu in initial dynamic : hom & non hom void GenerateVis2DInit(const std::string & aDir); private : // Generarte visu of one Histogramme void GenerateVisuOneIm(const std::string & aDir,const std::string & aPrefix,const tHistND &); // void GenerateVis2DInitOneInit(const std::string & aDir,const std::string & aPrefix,cIm2D<double>,const tHistND&); void ComputePts(const cVecCaracMatch &) const; cHistoCarNDim(const cHistoCarNDim &) = delete; bool mIP; int mDim; tIndex mSz; tVecCar mVCar; mutable tIndex mPts; mutable tIndex mPtsInit; // Memorize Pts before Histo Equal, for visualization mutable tRIndex mRPts; // Real Pts std::vector<const tHisto1*> mHd1_0; tHistND mHist0; // Homolog tHistND mHist2; // Non (or close) Homolog std::string mName; double mNbOk; double mNbNotOk; bool mGV2I; cIm2D<double> mHistoI0; cIm2D<double> mHistoI2; }; void AddData(const cAuxAr2007 & anAux,cHistoCarNDim & aHND); };
6,591
600
/*
 * OpenBOR - http://www.chronocrash.com
 * -----------------------------------------------------------------------
 * All rights reserved, see LICENSE in OpenBOR root for details.
 *
 * Copyright (c) 2004 - 2014 OpenBOR Team
 */

#include "Stack.h"

/* LIFO stack implemented on top of the generic List type.
 * NOTE(review): these wrappers rely on List_Reset positioning the list
 * cursor at the head so the top of the stack is the first element —
 * confirm against List.h. */

/* Push element e onto the top of the stack. */
void Stack_Push( Stack *stack, void *e)
{
    List_Reset(stack);
    List_InsertBefore(stack, e, NULL );
}

/* Remove the top element (does not return it; call Stack_Top first). */
void Stack_Pop(Stack *stack )
{
    List_Reset(stack);
    List_Remove(stack);
}

/* Return the top element without removing it. */
void *Stack_Top(const Stack *stack)
{
    return List_Retrieve(stack);
}

/* Nonzero when the stack contains no elements. */
int Stack_IsEmpty(const Stack *stack)
{
    return (List_GetSize(stack) == 0);
}

/* Initialize a stack before first use. */
void Stack_Init(Stack *stack)
{
    List_Init(stack);
}
232
635
<filename>alembic/versions/098ab7c733ea_add_user_to_flag.py
"""add user_to_flag

Revision ID: 098ab7c733ea
Revises: <KEY>
Create Date: 2021-08-08 23:03:49.444306

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "098ab7c733ea"
down_revision = "<KEY>"
branch_labels = None
depends_on = None


def upgrade():
    # Association table linking user rows to flag rows.  ON DELETE CASCADE on
    # both foreign keys removes link rows automatically when either side is
    # deleted.
    op.create_table(
        "user_to_flag",
        sa.Column(
            "user_id",
            sa.Integer,
            sa.ForeignKey("user.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "flag_id",
            sa.Integer,
            sa.ForeignKey("flag.id", ondelete="CASCADE"),
            nullable=False,
        ),
    )
    # New nullable columns; the leading underscore matches the naming used by
    # the corresponding model attributes.
    op.add_column("category", sa.Column("_description", sa.VARCHAR(1024)))
    op.add_column("user", sa.Column("_expire", sa.DateTime))


def downgrade():
    # Exact reverse of upgrade(): drop the association table and both columns.
    op.drop_table("user_to_flag")
    op.drop_column("category", "_description")
    op.drop_column("user", "_expire")
475
303
from sqlobject import *
from sqlobject.tests.dbtest import *

########################################
## Transaction test
########################################

class TestSOTrans(SQLObject):
    # Minimal row type used by the transaction tests below.
    #_cacheValues = False
    class sqlmeta:
        # Deterministic select() ordering so the list asserts are stable.
        defaultOrder = 'name'
    name = StringCol(length=10, alternateID=True, dbName='name_col')

def test_transaction():
    """Changes made inside a transaction are isolated until commit and
    undone by rollback."""
    # Skip on database backends without transaction support.
    if not supports('transactions'):
        return
    setupClass(TestSOTrans)
    TestSOTrans(name='bob')
    TestSOTrans(name='tim')
    trans = TestSOTrans._connection.transaction()
    try:
        # Make any accidental autocommit raise instead of silently committing.
        TestSOTrans._connection.autoCommit = 'exception'
        TestSOTrans(name='joe', connection=trans)
        trans.rollback()
        trans.begin()
        # 'joe' was rolled back, so only the two pre-transaction rows remain.
        assert ([n.name for n in TestSOTrans.select(connection=trans)]
                == ['bob', 'tim'])
        b = TestSOTrans.byName('bob', connection=trans)
        b.name = 'robert'
        trans.commit()
        assert b.name == 'robert'
        b.name = 'bob'
        trans.rollback()
        trans.begin()
        # The rename back to 'bob' was rolled back; the committed value sticks.
        assert b.name == 'robert'
    finally:
        TestSOTrans._connection.autoCommit = True

def test_transaction_commit_sync():
    """An instance fetched outside the transaction sees a change only
    after the transaction commits."""
    if not supports('transactions'):
        return
    setupClass(TestSOTrans)
    trans = TestSOTrans._connection.transaction()
    try:
        TestSOTrans(name='bob')
        bOut = TestSOTrans.byName('bob')
        bIn = TestSOTrans.byName('bob', connection=trans)
        bIn.name = 'robert'
        # Uncommitted change is invisible outside the transaction...
        assert bOut.name == 'bob'
        trans.commit()
        # ...and visible after commit.
        assert bOut.name == 'robert'
    finally:
        TestSOTrans._connection.autoCommit = True

def test_transaction_delete(close=False):
    """A row deleted inside a transaction disappears only on commit;
    afterwards both lookups and stale instances raise SQLObjectNotFound.

    close: forwarded to trans.commit() to also exercise closing the
    transaction connection on commit.
    """
    if not supports('transactions'):
        return
    setupClass(TestSOTrans)
    trans = TestSOTrans._connection.transaction()
    try:
        TestSOTrans(name='bob')
        bIn = TestSOTrans.byName('bob', connection=trans)
        bIn.destroySelf()
        bOut = TestSOTrans.select(TestSOTrans.q.name=='bob')
        # Delete is not committed yet, so the outside connection still
        # sees the row.
        assert bOut.count() == 1
        bOutInst = bOut[0]
        bOutID = bOutInst.id
        trans.commit(close=close)
        assert bOut.count() == 0
        raises(SQLObjectNotFound, "TestSOTrans.get(bOutID)")
        raises(SQLObjectNotFound, "bOutInst.name")
    finally:
        trans.rollback()
        TestSOTrans._connection.autoCommit = True

def test_transaction_delete_with_close():
    # Same as test_transaction_delete, but the transaction connection is
    # closed as part of the commit.
    test_transaction_delete(close=True)
1,044
303
<filename>www/draw/metadata/2/2274.json {"id":2274,"line-1":"Community of Madrid","line-2":"Spain","attribution":"©2014 DigitalGlobe","url":"https://www.google.com/maps/@40.468889,-3.722294,18z/data=!3m1!1e3"}
83
915
package org.nzbhydra.tests.pageobjects;

import org.nzbhydra.misc.Sleep;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.popper.fw.webdriver.elements.impl.AbstractWebElement;
import org.popper.fw.webdriver.elements.impl.WebElementReference;

import java.util.List;
import java.util.function.BooleanSupplier;

/**
 * Page object for a sortable table column header. The header exposes a
 * clickable sort marker ("marker-sortable") once sorted and a clickable
 * text element ("text-sortable") when unsorted; the glyphicon triangle
 * classes indicate the current sort direction.
 *
 * Sorting in a given direction is asynchronous in the UI, so the sort
 * methods poll with short sleeps and retry the toggle a bounded number of
 * times before giving up.
 */
public class ColumnSortable extends AbstractWebElement implements IColumnSortable {

    public ColumnSortable(WebElementReference reference) {
        super(reference);
    }

    /**
     * Flips the sort state once: clicks the direction marker when the
     * column is already sorted, otherwise clicks the header text to apply
     * the initial sort.
     */
    @Override
    public void toggleSort() {
        if (isSorted()) {
            getElement().findElement(By.className("marker-sortable")).click();
        } else {
            getElement().findElement(By.className("text-sortable")).click();
        }
    }

    /**
     * Sorts the column ascending, retrying the toggle once before failing.
     *
     * @throws RuntimeException if the column cannot be sorted ascending
     */
    @Override
    public void sortAscending() {
        sortUntil(this::isSortedAscending, 1, "ascending");
    }

    /**
     * Sorts the column descending. Keeps the original behavior of allowing
     * one more retry than sortAscending (the descending state is reached by
     * toggling through ascending first).
     *
     * @throws RuntimeException if the column cannot be sorted descending
     */
    @Override
    public void sortDescending() {
        sortUntil(this::isSortedDescending, 2, "descending");
    }

    /**
     * Shared retry loop for both sort directions (previously duplicated in
     * sortAscending/sortDescending with slightly different structure).
     *
     * @param inTargetOrder supplier reporting whether the target order is active
     * @param maxRetries    how many extra toggle attempts to make after the first
     * @param direction     direction name used in the failure message
     */
    private void sortUntil(BooleanSupplier inTargetOrder, int maxRetries, String direction) {
        if (!isSorted() || !inTargetOrder.getAsBoolean()) {
            toggleSort();
        }
        // Give the UI a moment to re-render the sort markers after each click.
        Sleep.sleep(100);
        for (int attempt = 0; attempt < maxRetries && !inTargetOrder.getAsBoolean(); attempt++) {
            toggleSort();
            Sleep.sleep(100);
        }
        if (!inTargetOrder.getAsBoolean()) {
            throw new RuntimeException("Unable to sort " + direction);
        }
    }

    /** @return true when either direction marker is currently displayed. */
    public boolean isSorted() {
        return isSortedAscending() || isSortedDescending();
    }

    /** @return true when the ascending (triangle-top) marker is displayed. */
    public boolean isSortedAscending() {
        return isMarkerDisplayed("glyphicon-triangle-top");
    }

    /** @return true when the descending (triangle-bottom) marker is displayed. */
    public boolean isSortedDescending() {
        return isMarkerDisplayed("glyphicon-triangle-bottom");
    }

    /**
     * Checks whether the sort marker carrying the given glyphicon class is
     * present and displayed (consolidates the duplicated lookup from the
     * two isSorted* methods).
     */
    private boolean isMarkerDisplayed(String glyphiconClass) {
        List<WebElement> elements = getElement().findElements(
                By.cssSelector(".marker-sortable." + glyphiconClass));
        if (elements.isEmpty()) {
            return false;
        }
        return elements.get(0).isDisplayed();
    }
}
995